I0125 05:11:54.701826 4678 anyauthpassword.go:40] Got userIdentityMapping: &user.DefaultInfo{Name:"extended-test-postgresql-replication-0-bwll6-pnjps-user", UID:"b287f8b9-e2e6-11e6-a4b0-0e6a5cbf0094", Groups:[]string(nil), Extra:map[string][]string(nil)}
I0125 05:11:54.701866 4678 basicauth.go:45] Login with provider "anypassword" succeeded for login "extended-test-postgresql-replication-0-bwll6-pnjps-user": &user.DefaultInfo{Name:"extended-test-postgresql-replication-0-bwll6-pnjps-user", UID:"b287f8b9-e2e6-11e6-a4b0-0e6a5cbf0094", Groups:[]string(nil), Extra:map[string][]string(nil)}
I0125 05:11:54.701892 4678 authenticator.go:38] OAuth authentication succeeded: &user.DefaultInfo{Name:"extended-test-postgresql-replication-0-bwll6-pnjps-user", UID:"b287f8b9-e2e6-11e6-a4b0-0e6a5cbf0094", Groups:[]string(nil), Extra:map[string][]string(nil)}
I0125 05:11:54.706103 4678 panics.go:76] GET /oauth/authorize?response_type=token&client_id=openshift-challenging-client: (9.172725ms) 302
goroutine 1564829 [running]:
github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog.(*respLogger).recordStatus(0xc435fd0150, 0x12e)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog/log.go:219 +0xbb
github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog.(*respLogger).WriteHeader(0xc435fd0150, 0x12e)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog/log.go:198 +0x35
github.com/openshift/origin/vendor/github.com/RangelReale/osin.OutputJSON(0xc430bbe7e0, 0x9292920, 0xc435fd0150, 0xc4382c9680, 0xc435fd0150, 0x1065900)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/github.com/RangelReale/osin/response_json.go:24 +0x23e
github.com/openshift/origin/pkg/oauth/server/osinserver.(*Server).handleAuthorize(0xc4224cd280, 0x9292920, 0xc435fd0150, 0xc4382c9680)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/oauth/server/osinserver/osinserver.go:101 +0x106
github.com/openshift/origin/pkg/oauth/server/osinserver.(*Server).(github.com/openshift/origin/pkg/oauth/server/osinserver.handleAuthorize)-fm(0x9292920, 0xc435fd0150, 0xc4382c9680)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/oauth/server/osinserver/osinserver.go:47 +0x48
net/http.HandlerFunc.ServeHTTP(0xc421d08bb0, 0x9292920, 0xc435fd0150, 0xc4382c9680)
    /usr/local/go/src/net/http/server.go:1726 +0x44
github.com/openshift/origin/vendor/github.com/gorilla/context.ClearHandler.func1(0x9292920, 0xc435fd0150, 0xc4382c9680)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/github.com/gorilla/context/context.go:141 +0x8b
net/http.HandlerFunc.ServeHTTP(0xc42259dc80, 0x9292920, 0xc435fd0150, 0xc4382c9680)
    /usr/local/go/src/net/http/server.go:1726 +0x44
net/http.(*ServeMux).ServeHTTP(0xc422450120, 0x9292920, 0xc435fd0150, 0xc4382c9680)
    /usr/local/go/src/net/http/server.go:2022 +0x7f
net/http.(*ServeMux).ServeHTTP(0xc42132c7b0, 0x9292920, 0xc435fd0150, 0xc4382c9680)
    /usr/local/go/src/net/http/server.go:2022 +0x7f
github.com/openshift/origin/pkg/cmd/server/origin.WithPatternsHandler.func1(0x9292920, 0xc435fd0150, 0xc4382c9680)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/cmd/server/origin/master.go:946 +0xcd
net/http.HandlerFunc.ServeHTTP(0xc422a6df80, 0x9292920, 0xc435fd0150, 0xc4382c9680)
    /usr/local/go/src/net/http/server.go:1726 +0x44
github.com/openshift/origin/pkg/cmd/server/origin.WithAssetServerRedirect.func1(0x9292920, 0xc435fd0150, 0xc4382c9680)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/cmd/server/origin/handlers.go:297 +0x7f
net/http.HandlerFunc.ServeHTTP(0xc42132d080, 0x9292920, 0xc435fd0150, 0xc4382c9680)
    /usr/local/go/src/net/http/server.go:1726 +0x44
github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.WithCORS.func1(0x9292920, 0xc435fd0150, 0xc4382c9680)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/cors.go:77 +0x1a2
net/http.HandlerFunc.ServeHTTP(0xc421745980, 0x9292920, 0xc435fd0150, 0xc4382c9680)
    /usr/local/go/src/net/http/server.go:1726 +0x44
github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.WithPanicRecovery.func1(0x9292920, 0xc435fd0150, 0xc4382c9680)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/panics.go:75 +0x24a
net/http.HandlerFunc.ServeHTTP(0xc42132d1d0, 0x7ff0a8661900, 0xc42b2a3f58, 0xc4382c9680)
    /usr/local/go/src/net/http/server.go:1726 +0x44
github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.(*timeoutHandler).ServeHTTP.func1(0xc4226e0720, 0x929ba20, 0xc42b2a3f58, 0xc4382c9680, 0xc43537b680)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/timeout.go:78 +0x8d
created by github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.(*timeoutHandler).ServeHTTP
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/timeout.go:80 +0x1db
 [[Go-http-client/2.0] 172.18.7.222:50940]
I0125 05:11:54.708767 4678 audit.go:125] 2017-01-25T05:11:54.708730766-05:00 AUDIT: id="316b9107-7fbe-4b0d-a612-085933d5c6f9" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-0-bwll6-pnjps-user" as="" asgroups="" namespace="" uri="/oapi/v1/users/~"
I0125 05:11:54.709743 4678 audit.go:45] 2017-01-25T05:11:54.709729305-05:00 AUDIT: id="316b9107-7fbe-4b0d-a612-085933d5c6f9" response="200"
I0125 05:11:54.709829 4678 panics.go:76] GET /oapi/v1/users/~: (2.528619ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942]
I0125 05:11:54.712159 4678 audit.go:125] 2017-01-25T05:11:54.712131308-05:00 AUDIT: id="732ed561-dd32-4a84-90ea-48c127964d73" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-0-bwll6-pnjps-user" as="" asgroups="" namespace="" uri="/oapi/v1/users/~"
I0125 05:11:54.712990 4678 audit.go:45] 2017-01-25T05:11:54.712976067-05:00 AUDIT: id="732ed561-dd32-4a84-90ea-48c127964d73" response="200"
I0125 05:11:54.713053 4678 panics.go:76] GET /oapi/v1/users/~: (2.290206ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942]
I0125 05:11:54.718373 4678 audit.go:125] 2017-01-25T05:11:54.718334021-05:00 AUDIT: id="921ec1fb-d780-40a9-91af-4e101dfc178e" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-0-bwll6-pnjps-user" as="" asgroups="" namespace="" uri="/oapi/v1/projectrequests"
I0125 05:11:54.719557 4678 audit.go:125] 2017-01-25T05:11:54.719528126-05:00 AUDIT: id="13381845-1d7a-449e-86af-cbfe6d5ff3d2" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/projects/extended-test-postgresql-replication-0-bwll6-pnjps"
I0125 05:11:54.720246 4678 audit.go:125] 2017-01-25T05:11:54.72021517-05:00 AUDIT: id="8910436e-769f-49cf-a829-49979fa09969" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps"
I0125 05:11:54.721136 4678 audit.go:45] 2017-01-25T05:11:54.721116869-05:00 AUDIT: id="8910436e-769f-49cf-a829-49979fa09969" response="404"
I0125 05:11:54.721211 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps: (1.220867ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.721521 4678 audit.go:45] 2017-01-25T05:11:54.721509532-05:00 AUDIT: id="13381845-1d7a-449e-86af-cbfe6d5ff3d2" response="404"
I0125 05:11:54.721557 4678 panics.go:76] GET /oapi/v1/projects/extended-test-postgresql-replication-0-bwll6-pnjps: (2.289745ms) 404 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794]
I0125 05:11:54.722562 4678 audit.go:125] 2017-01-25T05:11:54.722533344-05:00 AUDIT: id="5afa0952-3922-460c-b247-e09fe94a705f" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/oapi/v1/namespaces/default/processedtemplates"
I0125 05:11:54.723733 4678 audit.go:45] 2017-01-25T05:11:54.723720343-05:00 AUDIT: id="5afa0952-3922-460c-b247-e09fe94a705f" response="201"
I0125 05:11:54.723950 4678 panics.go:76] POST /oapi/v1/namespaces/default/processedtemplates: (1.650846ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794]
I0125 05:11:54.725116 4678 audit.go:125] 2017-01-25T05:11:54.725089063-05:00 AUDIT: id="eee134e8-fbee-48fd-aa47-dab0ff4a8f04" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/projects"
I0125 05:11:54.726152 4678 audit.go:125] 2017-01-25T05:11:54.72612522-05:00 AUDIT: id="6e750af8-ac6e-4dda-9e34-c88dd0a40c5a" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/namespaces"
I0125 05:11:54.728121 4678 audit.go:45] 2017-01-25T05:11:54.728105762-05:00 AUDIT: id="6e750af8-ac6e-4dda-9e34-c88dd0a40c5a" response="201"
I0125 05:11:54.728179 4678 panics.go:76] POST /api/v1/namespaces: (2.284957ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.729071 4678 audit.go:45] 2017-01-25T05:11:54.729058004-05:00 AUDIT: id="eee134e8-fbee-48fd-aa47-dab0ff4a8f04" response="201"
I0125 05:11:54.729124 4678 panics.go:76] POST /oapi/v1/projects: (4.268282ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794]
I0125 05:11:54.730327 4678 audit.go:125] 2017-01-25T05:11:54.730292137-05:00 AUDIT: id="890ce9e9-436a-414a-8a43-b4c70cd335a6" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts"
I0125 05:11:54.732521 4678 audit.go:125] 2017-01-25T05:11:54.732490058-05:00 AUDIT: id="69a8ca19-e4f4-42d7-af8f-63cd95e5b6a4" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/rolebindings"
I0125 05:11:54.734403 4678 audit.go:45] 2017-01-25T05:11:54.734388216-05:00 AUDIT: id="890ce9e9-436a-414a-8a43-b4c70cd335a6" response="201"
I0125 05:11:54.734461 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts: (4.433807ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.738174 4678 audit.go:125] 2017-01-25T05:11:54.738138313-05:00 AUDIT: id="3df17278-dd05-4026-9265-d56394b99402" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default"
I0125 05:11:54.738899 4678 audit.go:125] 2017-01-25T05:11:54.738868741-05:00 AUDIT: id="42f1f976-a348-4cee-b4dd-2818333e3276" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default"
I0125 05:11:54.739835 4678 audit.go:125] 2017-01-25T05:11:54.739802018-05:00 AUDIT: id="3a05ad6a-a971-4fd2-827a-0d2960b3acb6" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts"
I0125 05:11:54.740441 4678 audit.go:125] 2017-01-25T05:11:54.740411957-05:00 AUDIT: id="a4b9e68d-9bf5-440c-b69c-4fb3957231f1" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps"
I0125 05:11:54.744634 4678 audit.go:45] 2017-01-25T05:11:54.744618147-05:00 AUDIT: id="3a05ad6a-a971-4fd2-827a-0d2960b3acb6" response="201"
I0125 05:11:54.744700 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts: (5.120143ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.745114 4678 audit.go:45] 2017-01-25T05:11:54.745101207-05:00 AUDIT: id="3df17278-dd05-4026-9265-d56394b99402" response="200"
I0125 05:11:54.745159 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (7.279952ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.745339 4678 audit.go:45] 2017-01-25T05:11:54.745327334-05:00 AUDIT: id="42f1f976-a348-4cee-b4dd-2818333e3276" response="200"
I0125 05:11:54.745384 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (6.749911ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.745629 4678 audit.go:45] 2017-01-25T05:11:54.745617617-05:00 AUDIT: id="a4b9e68d-9bf5-440c-b69c-4fb3957231f1" response="200"
I0125 05:11:54.745678 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps: (5.535782ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.753907 4678 audit.go:125] 2017-01-25T05:11:54.753852431-05:00 AUDIT: id="0d6cb5c4-2f4c-4426-9b16-f75087c397fe" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts"
I0125 05:11:54.756780 4678 audit.go:45] 2017-01-25T05:11:54.756765868-05:00 AUDIT: id="0d6cb5c4-2f4c-4426-9b16-f75087c397fe" response="201"
I0125 05:11:54.756833 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts: (3.238601ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.757623 4678 audit.go:45] 2017-01-25T05:11:54.757609905-05:00 AUDIT: id="69a8ca19-e4f4-42d7-af8f-63cd95e5b6a4" response="201"
I0125 05:11:54.757676 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/rolebindings: (25.426668ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794]
I0125 05:11:54.758255 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (28.866805ms)
I0125 05:11:54.758730 4678 audit.go:125] 2017-01-25T05:11:54.758699118-05:00 AUDIT: id="0c4c0cfb-ce3e-4c82-b14a-1bc4cd22c2f6" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/rolebindings"
I0125 05:11:54.761767 4678 audit.go:45] 2017-01-25T05:11:54.761753143-05:00 AUDIT: id="0c4c0cfb-ce3e-4c82-b14a-1bc4cd22c2f6" response="201"
I0125 05:11:54.761819 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/rolebindings: (3.358124ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794]
I0125 05:11:54.763068 4678 audit.go:125] 2017-01-25T05:11:54.763037937-05:00 AUDIT: id="f52090ea-342b-4c84-b018-6a1af168339c" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/rolebindings"
I0125 05:11:54.766300 4678 audit.go:45] 2017-01-25T05:11:54.766284278-05:00 AUDIT: id="f52090ea-342b-4c84-b018-6a1af168339c" response="201"
I0125 05:11:54.766359 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/rolebindings: (3.555708ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794]
I0125 05:11:54.767612 4678 audit.go:125] 2017-01-25T05:11:54.767579928-05:00 AUDIT: id="78ee17e9-541b-42cc-b376-91022d74e3f2" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/rolebindings"
I0125 05:11:54.777806 4678 create_dockercfg_secrets.go:459] Creating token secret "default-token-fbtw9" for service account extended-test-postgresql-replication-0-bwll6-pnjps/default
I0125 05:11:54.796922 4678 audit.go:125] 2017-01-25T05:11:54.796515483-05:00 AUDIT: id="26e6d76a-0514-4b0c-a6f2-61af323eb39d" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:54.798909 4678 audit.go:125] 2017-01-25T05:11:54.798770943-05:00 AUDIT: id="41a684cb-2290-49b8-bfd4-7367b7449e08" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder"
I0125 05:11:54.800334 4678 audit.go:125] 2017-01-25T05:11:54.800291913-05:00 AUDIT: id="36a4c0e5-4564-485e-9a24-0cbf4c0859be" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder"
I0125 05:11:54.800433 4678 audit.go:125] 2017-01-25T05:11:54.80029123-05:00 AUDIT: id="02700589-b014-45b3-afdb-59293b8d1684" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts"
I0125 05:11:54.823974 4678 audit.go:125] 2017-01-25T05:11:54.823909221-05:00 AUDIT: id="7653f806-d556-443b-9b5f-80ff8b6bb79f" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer"
I0125 05:11:54.824259 4678 audit.go:125] 2017-01-25T05:11:54.824194243-05:00 AUDIT: id="f4d63dfe-7177-4a62-aa9c-9e3c7db57dd7" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer"
I0125 05:11:54.830753 4678 audit.go:125] 2017-01-25T05:11:54.830700844-05:00 AUDIT: id="9b3eb686-9acc-46a7-b0a4-709b6681f298" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:54.835182 4678 audit.go:125] 2017-01-25T05:11:54.835132-05:00 AUDIT: id="05afc1e6-adb7-4524-8af9-62d53c2d843e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/resourcequotas"
I0125 05:11:54.873000 4678 audit.go:45] 2017-01-25T05:11:54.872971266-05:00 AUDIT: id="05afc1e6-adb7-4524-8af9-62d53c2d843e" response="200"
I0125 05:11:54.873131 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/resourcequotas: (38.798772ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.874771 4678 audit.go:45] 2017-01-25T05:11:54.874749-05:00 AUDIT: id="41a684cb-2290-49b8-bfd4-7367b7449e08" response="200"
I0125 05:11:54.874854 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder: (76.425387ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.875152 4678 audit.go:45] 2017-01-25T05:11:54.875135684-05:00 AUDIT: id="f4d63dfe-7177-4a62-aa9c-9e3c7db57dd7" response="200"
I0125 05:11:54.875429 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer: (52.220278ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.875830 4678 audit.go:45] 2017-01-25T05:11:54.875814957-05:00 AUDIT: id="7653f806-d556-443b-9b5f-80ff8b6bb79f" response="200"
I0125 05:11:54.875898 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer: (52.652151ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.876881 4678 audit.go:45] 2017-01-25T05:11:54.876861522-05:00 AUDIT: id="36a4c0e5-4564-485e-9a24-0cbf4c0859be" response="200"
I0125 05:11:54.876954 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder: (77.206756ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.901160 4678 create_dockercfg_secrets.go:459] Creating token secret "deployer-token-rrz3b" for service account extended-test-postgresql-replication-0-bwll6-pnjps/deployer
I0125 05:11:54.901447 4678 tokens_controller.go:401] serviceaccount extended-test-postgresql-replication-0-bwll6-pnjps/deployer is not up to date, skipping token creation
I0125 05:11:54.901550 4678 create_dockercfg_secrets.go:459] Creating token secret "builder-token-hc971" for service account extended-test-postgresql-replication-0-bwll6-pnjps/builder
I0125 05:11:54.903538 4678 audit.go:45] 2017-01-25T05:11:54.903513872-05:00 AUDIT: id="02700589-b014-45b3-afdb-59293b8d1684" response="409"
I0125 05:11:54.903624 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts: (103.679049ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.948107 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (168.094172ms)
I0125 05:11:54.950574 4678 container_gc.go:249] Removing container "f29a22668a071e0d1e50ece55052402a83e4fee1353aff2077cc71982b710a21" name "POD"
I0125 05:11:54.957989 4678 audit.go:45] 2017-01-25T05:11:54.957961034-05:00 AUDIT: id="78ee17e9-541b-42cc-b376-91022d74e3f2" response="201"
I0125 05:11:54.958101 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/rolebindings: (190.733372ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794]
I0125 05:11:54.959577 4678 audit.go:125] 2017-01-25T05:11:54.959486502-05:00 AUDIT: id="befe2e8a-4493-46e3-aa57-b2403e7dc150" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/projects/extended-test-postgresql-replication-0-bwll6-pnjps"
I0125 05:11:54.960737 4678 audit.go:125] 2017-01-25T05:11:54.960698951-05:00 AUDIT: id="68996cf0-2213-4cfc-9ad0-6f65ac1b2e1a" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:54.961870 4678 audit.go:125] 2017-01-25T05:11:54.961827713-05:00 AUDIT: id="134d9c14-00be-4a4e-b864-6dac62cd4305" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps"
I0125 05:11:54.965780 4678 audit.go:125] 2017-01-25T05:11:54.965726742-05:00 AUDIT: id="250ddf74-121a-4f8c-8ed6-08d5e3dcc4c7" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:54.967867 4678 audit.go:45] 2017-01-25T05:11:54.967697941-05:00 AUDIT: id="134d9c14-00be-4a4e-b864-6dac62cd4305" response="200"
I0125 05:11:54.967959 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps: (6.597036ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.968893 4678 audit.go:125] 2017-01-25T05:11:54.968851997-05:00 AUDIT: id="c6b25e09-9162-4b01-94e9-3fb23a49d945" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:54.973342 4678 audit.go:125] 2017-01-25T05:11:54.973294503-05:00 AUDIT: id="89898a46-f150-44dd-86f9-c58c022eb845" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer"
I0125 05:11:54.979184 4678 audit.go:45] 2017-01-25T05:11:54.979160541-05:00 AUDIT: id="68996cf0-2213-4cfc-9ad0-6f65ac1b2e1a" response="201"
I0125 05:11:54.979743 4678 audit.go:45] 2017-01-25T05:11:54.979727562-05:00 AUDIT: id="250ddf74-121a-4f8c-8ed6-08d5e3dcc4c7" response="201"
I0125 05:11:54.979828 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (14.665219ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.990991 4678 audit.go:45] 2017-01-25T05:11:54.990954187-05:00 AUDIT: id="89898a46-f150-44dd-86f9-c58c022eb845" response="200"
I0125 05:11:54.991091 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer: (18.368252ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:54.999298 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (39.125715ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.000095 4678 audit.go:45] 2017-01-25T05:11:55.000071099-05:00 AUDIT: id="26e6d76a-0514-4b0c-a6f2-61af323eb39d" response="201"
I0125 05:11:55.000186 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (204.434017ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.000555 4678 audit.go:45] 2017-01-25T05:11:55.00053968-05:00 AUDIT: id="9b3eb686-9acc-46a7-b0a4-709b6681f298" response="201"
I0125 05:11:55.000954 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (170.88363ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.005130 4678 create_dockercfg_secrets.go:459] Creating token secret "deployer-token-rrz3b" for service account extended-test-postgresql-replication-0-bwll6-pnjps/deployer
I0125 05:11:55.018622 4678 create_dockercfg_secrets.go:459] Creating token secret "default-token-fbtw9" for service account extended-test-postgresql-replication-0-bwll6-pnjps/default
I0125 05:11:55.020982 4678 audit.go:45] 2017-01-25T05:11:55.020958379-05:00 AUDIT: id="befe2e8a-4493-46e3-aa57-b2403e7dc150" response="200"
I0125 05:11:55.021063 4678 panics.go:76] GET /oapi/v1/projects/extended-test-postgresql-replication-0-bwll6-pnjps: (61.914039ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794]
I0125 05:11:55.022438 4678 audit.go:125] 2017-01-25T05:11:55.022399772-05:00 AUDIT: id="2efa8c2a-d0aa-4132-b5d6-6d08f6290451" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default"
I0125 05:11:55.022985 4678 audit.go:45] 2017-01-25T05:11:55.022972978-05:00 AUDIT: id="2efa8c2a-d0aa-4132-b5d6-6d08f6290451" response="409"
I0125 05:11:55.023033 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (911.948µs) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.023396 4678 audit.go:125] 2017-01-25T05:11:55.023367584-05:00 AUDIT: id="ac790032-674b-4647-a452-e039a68dcea8" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:55.023925 4678 audit.go:125] 2017-01-25T05:11:55.02389699-05:00 AUDIT: id="bce000e0-b792-4fce-8690-e5c4cb6b555b" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:55.024456 4678 audit.go:125] 2017-01-25T05:11:55.024426538-05:00 AUDIT: id="0d1cf976-ffee-4449-93ea-586d18ae5134" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-token-rrz3b"
I0125 05:11:55.025076 4678 audit.go:125] 2017-01-25T05:11:55.025046395-05:00 AUDIT: id="4e64d9ba-4f14-45a1-a35c-fb6d9874af7b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-fbtw9"
I0125 05:11:55.027336 4678 audit.go:45] 2017-01-25T05:11:55.027321514-05:00 AUDIT: id="c6b25e09-9162-4b01-94e9-3fb23a49d945" response="201"
I0125 05:11:55.027396 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (59.04739ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.027776 4678 audit.go:45] 2017-01-25T05:11:55.027700307-05:00 AUDIT: id="921ec1fb-d780-40a9-91af-4e101dfc178e" response="201"
I0125 05:11:55.027879 4678 panics.go:76] POST /oapi/v1/projectrequests: (311.119439ms) 201 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942]
I0125 05:11:55.028268 4678 tokens_controller.go:448] deleting secret extended-test-postgresql-replication-0-bwll6-pnjps/default-token-hcd3s because reference couldn't be added (Operation cannot be fulfilled on serviceaccounts "default": the object has been modified; please apply your changes to the latest version and try again)
I0125 05:11:55.028551 4678 audit.go:125] 2017-01-25T05:11:55.028517199-05:00 AUDIT: id="ea72a5ef-cc9a-46a5-99a3-c36d7e4a43f8" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder"
I0125 05:11:55.028986 4678 audit.go:45] 2017-01-25T05:11:55.028974003-05:00 AUDIT: id="ea72a5ef-cc9a-46a5-99a3-c36d7e4a43f8" response="409"
I0125 05:11:55.029030 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder: (750.808µs) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.029401 4678 audit.go:125] 2017-01-25T05:11:55.029370185-05:00 AUDIT: id="8110c6ed-ad28-474b-b9b1-52d7d2e105b2" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:55.030344 4678 create_dockercfg_secrets.go:459] Creating token secret "builder-token-hc971" for service account extended-test-postgresql-replication-0-bwll6-pnjps/builder
I0125 05:11:55.032143 4678 audit.go:45] 2017-01-25T05:11:55.032129429-05:00 AUDIT: id="0d1cf976-ffee-4449-93ea-586d18ae5134" response="200"
I0125 05:11:55.032213 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-token-rrz3b: (8.004858ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.032384 4678 audit.go:45] 2017-01-25T05:11:55.032372666-05:00 AUDIT: id="4e64d9ba-4f14-45a1-a35c-fb6d9874af7b" response="200"
I0125 05:11:55.032428 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-fbtw9: (7.607306ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.032865 4678 audit.go:125] 2017-01-25T05:11:55.032831939-05:00 AUDIT: id="271bdcde-6aef-4de6-973a-d4f63cfd5a1a" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-hcd3s"
I0125 05:11:55.033635 4678 audit.go:125] 2017-01-25T05:11:55.033604915-05:00 AUDIT: id="d043ad50-ea9f-4c30-ad4e-2105ef038894" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:55.034162 4678 audit.go:125] 2017-01-25T05:11:55.03413215-05:00 AUDIT: id="3e61086f-a486-41ac-9a40-94920ae8a784" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-hc971"
I0125 05:11:55.037152 4678 audit.go:45] 2017-01-25T05:11:55.037138539-05:00 AUDIT: id="8110c6ed-ad28-474b-b9b1-52d7d2e105b2" response="201"
I0125 05:11:55.038937 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (9.807858ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.039362 4678 tokens_controller.go:448] deleting secret extended-test-postgresql-replication-0-bwll6-pnjps/builder-token-ngbs7 because reference couldn't be added (Operation cannot be fulfilled on serviceaccounts "builder": the object has been modified; please apply your changes to the latest version and try again)
I0125 05:11:55.046971 4678 audit.go:45] 2017-01-25T05:11:55.046949179-05:00 AUDIT: id="ac790032-674b-4647-a452-e039a68dcea8" response="409"
I0125 05:11:55.047034 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (23.905458ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.047128 4678 audit.go:125] 2017-01-25T05:11:55.047093415-05:00 AUDIT: id="08c9b02d-4222-4ccc-a2ae-7f3ba820b902" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-fbtw9"
I0125 05:11:55.047261 4678 audit.go:45] 2017-01-25T05:11:55.047249489-05:00 AUDIT: id="bce000e0-b792-4fce-8690-e5c4cb6b555b" response="409"
I0125 05:11:55.047300 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (23.623768ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.047588 4678 audit.go:45] 2017-01-25T05:11:55.047577161-05:00 AUDIT: id="3e61086f-a486-41ac-9a40-94920ae8a784" response="200"
I0125 05:11:55.047633 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-hc971: (13.725907ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.048374 4678 audit.go:125] 2017-01-25T05:11:55.0483413-05:00 AUDIT: id="4ed212cc-30a3-470f-a89c-6565b9e8c0f7" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-ngbs7"
I0125 05:11:55.049070 4678 audit.go:125] 2017-01-25T05:11:55.04903999-05:00 AUDIT: id="c32e4fea-a5b5-435d-971a-a522367986de" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-token-rrz3b"
I0125 05:11:55.049577 4678 audit.go:125] 2017-01-25T05:11:55.049548308-05:00 AUDIT: id="e588ec69-41a1-47df-a367-f3743ff07c40" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer"
I0125 05:11:55.050826 4678 audit.go:125] 2017-01-25T05:11:55.050793244-05:00 AUDIT: id="db656622-5512-4579-8d16-2398e3013cfd" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-0-bwll6-pnjps-user" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods"
I0125 05:11:55.056843 4678 audit.go:45] 2017-01-25T05:11:55.056824407-05:00 AUDIT: id="e588ec69-41a1-47df-a367-f3743ff07c40" response="200"
I0125 05:11:55.056910 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer: (7.571298ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.061175 4678 audit.go:45] 2017-01-25T05:11:55.06115986-05:00 AUDIT: id="db656622-5512-4579-8d16-2398e3013cfd" response="200"
I0125 05:11:55.061236 4678 audit.go:45] 2017-01-25T05:11:55.061224279-05:00 AUDIT: id="c32e4fea-a5b5-435d-971a-a522367986de" response="200"
I0125 05:11:55.061276 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods: (28.981111ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942]
I0125 05:11:55.061423 4678 audit.go:45] 2017-01-25T05:11:55.06141106-05:00 AUDIT: id="d043ad50-ea9f-4c30-ad4e-2105ef038894" response="409"
I0125 05:11:55.061466 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (28.090812ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.061874 4678 audit.go:125] 2017-01-25T05:11:55.0618352-05:00 AUDIT: id="ba6c3e44-5615-41e7-a1b1-85b5b1970f20" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-hc971"
I0125 05:11:55.061899 4678 audit.go:45] 2017-01-25T05:11:55.06188842-05:00 AUDIT: id="08c9b02d-4222-4ccc-a2ae-7f3ba820b902" response="200"
I0125 05:11:55.062229 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-token-rrz3b: (13.401591ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.062276 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-fbtw9: (15.444167ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.066391 4678 audit.go:125] 2017-01-25T05:11:55.066352055-05:00 AUDIT: id="f582a9d7-ddb0-4227-bd50-a772ba4bfb80" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts?fieldSelector=metadata.name%3Ddefault"
I0125 05:11:55.067063 4678 audit.go:45] 2017-01-25T05:11:55.067048152-05:00 AUDIT: id="f582a9d7-ddb0-4227-bd50-a772ba4bfb80" response="200"
I0125 05:11:55.068639 4678 create_dockercfg_secrets.go:496] Creating dockercfg secret "deployer-dockercfg-w421d" for service account extended-test-postgresql-replication-0-bwll6-pnjps/deployer
I0125 05:11:55.069056 4678 create_dockercfg_secrets.go:496] Creating dockercfg secret "default-dockercfg-x02dh" for service account extended-test-postgresql-replication-0-bwll6-pnjps/default
I0125 05:11:55.069690 4678 audit.go:45] 2017-01-25T05:11:55.069675633-05:00 AUDIT: id="271bdcde-6aef-4de6-973a-d4f63cfd5a1a" response="200"
I0125 05:11:55.069741 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-hcd3s: (37.142925ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.069797 4678 audit.go:125] 2017-01-25T05:11:55.069766226-05:00 AUDIT: id="75fda8a9-7515-4ea6-a58e-c91949e2cb30" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:55.069976 4678 audit.go:45] 2017-01-25T05:11:55.069962755-05:00 AUDIT: id="ba6c3e44-5615-41e7-a1b1-85b5b1970f20" response="200"
I0125 05:11:55.070979 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-hc971: (9.400292ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.072580 4678 create_dockercfg_secrets.go:496] Creating dockercfg secret "builder-dockercfg-v9shh" for service account extended-test-postgresql-replication-0-bwll6-pnjps/builder
I0125 05:11:55.073535 4678 audit.go:125] 2017-01-25T05:11:55.073498657-05:00 AUDIT: id="2f02084b-690f-4301-8149-0a1a9f6a2ef7" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default"
I0125 05:11:55.074235 4678 audit.go:125] 2017-01-25T05:11:55.074174009-05:00 AUDIT: id="44f7ac9b-8d0d-4435-9ae5-6e1ab6e18f56" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg"
I0125 05:11:55.075017 4678 audit.go:125] 2017-01-25T05:11:55.074984258-05:00 AUDIT: id="8da16997-ebb8-47ae-b6e8-ba628e58b24c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default"
I0125 05:11:55.075651 4678 audit.go:45] 2017-01-25T05:11:55.075636578-05:00 AUDIT: id="4ed212cc-30a3-470f-a89c-6565b9e8c0f7" response="200"
I0125 05:11:55.075704 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-ngbs7: (27.592641ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.075882 4678 audit.go:45] 2017-01-25T05:11:55.075870117-05:00 AUDIT: id="75fda8a9-7515-4ea6-a58e-c91949e2cb30" response="201"
I0125 05:11:55.076682 4678 audit.go:45] 2017-01-25T05:11:55.076668629-05:00 AUDIT: id="44f7ac9b-8d0d-4435-9ae5-6e1ab6e18f56" response="200"
I0125 05:11:55.076717 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (7.186603ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.076844 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.927226ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.077075 4678 audit.go:45] 2017-01-25T05:11:55.077062467-05:00 AUDIT: id="2f02084b-690f-4301-8149-0a1a9f6a2ef7" response="200"
I0125 05:11:55.077125 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (3.863448ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.077565 4678 audit.go:125] 2017-01-25T05:11:55.077534577-05:00 AUDIT: id="9234e39d-d188-4b32-8d40-2fa6169e89a1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder"
I0125 05:11:55.078631 4678 audit.go:45] 2017-01-25T05:11:55.078618451-05:00 AUDIT: id="9234e39d-d188-4b32-8d40-2fa6169e89a1" response="200"
I0125 05:11:55.078675 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder: (1.369414ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.079144 4678 audit.go:125] 2017-01-25T05:11:55.079112835-05:00 AUDIT: id="145d2c20-246a-4ffa-b7f1-f571e316ee38" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder"
I0125 05:11:55.079364 4678 audit.go:125] 2017-01-25T05:11:55.079332956-05:00 AUDIT: id="0dd9fcf6-5230-4aaa-b4f4-2f1045485e66" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:55.079729 4678 audit.go:125] 2017-01-25T05:11:55.079697558-05:00 AUDIT: id="e3201d89-3ff2-4486-98b1-e57ff798228f" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer"
I0125 05:11:55.080572 4678 audit.go:125] 2017-01-25T05:11:55.080540535-05:00 AUDIT: id="799f5f89-1da2-4159-af18-1b5efc3db2e0" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg"
I0125 05:11:55.081004 4678 audit.go:45] 2017-01-25T05:11:55.080991058-05:00 AUDIT: id="8da16997-ebb8-47ae-b6e8-ba628e58b24c" response="200"
I0125 05:11:55.081055 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (6.299947ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.082125 4678 audit.go:45] 2017-01-25T05:11:55.08211144-05:00 AUDIT: id="145d2c20-246a-4ffa-b7f1-f571e316ee38" response="200"
I0125 05:11:55.082168 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder: (3.283966ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.082266 4678 audit.go:45] 2017-01-25T05:11:55.082255505-05:00 AUDIT: id="e3201d89-3ff2-4486-98b1-e57ff798228f" response="200"
I0125 05:11:55.082304 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer: (2.81892ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.082911 4678 audit.go:45] 2017-01-25T05:11:55.082898663-05:00 AUDIT: id="0dd9fcf6-5230-4aaa-b4f4-2f1045485e66" response="201"
I0125 05:11:55.084105 4678 audit.go:45] 2017-01-25T05:11:55.084091841-05:00 AUDIT: id="799f5f89-1da2-4159-af18-1b5efc3db2e0" response="200"
I0125 05:11:55.085002 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (4.681399ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.085061 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (5.966809ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.088625 4678 audit.go:125] 2017-01-25T05:11:55.088590152-05:00 AUDIT: id="be6a60b2-24ff-4b3f-aed4-e868847bedfe" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:55.090632 4678 audit.go:45] 2017-01-25T05:11:55.090617799-05:00 AUDIT: id="be6a60b2-24ff-4b3f-aed4-e868847bedfe" response="201"
I0125 05:11:55.091545 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (3.209001ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.092242 4678 audit.go:125] 2017-01-25T05:11:55.092194111-05:00 AUDIT: id="35f2e5a6-efbf-49af-8814-4fcdb6878c17" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:55.093632 4678 audit.go:125] 2017-01-25T05:11:55.093595704-05:00 AUDIT: id="17ee360c-a17d-448b-b6d1-bbf3380c90f0" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default"
I0125 05:11:55.094577 4678 audit.go:125] 2017-01-25T05:11:55.094544759-05:00 AUDIT: id="3cce5168-1dd7-40f8-a1e4-f25ac45d1b57" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:55.095254 4678 audit.go:45] 2017-01-25T05:11:55.095239665-05:00 AUDIT: id="35f2e5a6-efbf-49af-8814-4fcdb6878c17" response="201"
I0125 05:11:55.095291 4678 audit.go:125] 2017-01-25T05:11:55.095261739-05:00 AUDIT: id="c5adaef0-bb28-4361-968e-c53f1ac3532c" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default"
I0125 05:11:55.096729 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (4.756039ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.096898 4678 store.go:283] GuaranteedUpdate of kubernetes.io/serviceaccounts/extended-test-postgresql-replication-0-bwll6-pnjps/default failed because of a conflict, going to retry
I0125 05:11:55.097011 4678 audit.go:45] 2017-01-25T05:11:55.096999104-05:00 AUDIT: id="c5adaef0-bb28-4361-968e-c53f1ac3532c" response="409"
I0125 05:11:55.097057 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (2.037233ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.097240 4678 audit.go:45] 2017-01-25T05:11:55.097227208-05:00 AUDIT: id="17ee360c-a17d-448b-b6d1-bbf3380c90f0" response="200"
I0125 05:11:55.097285 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (3.920267ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.097981 4678 tokens_controller.go:448] deleting secret extended-test-postgresql-replication-0-bwll6-pnjps/default-token-73vff because reference couldn't be added (Operation cannot be fulfilled on serviceaccounts "default": the object has been modified; please apply your changes to the latest version and try again)
I0125 05:11:55.098133 4678 audit.go:125] 2017-01-25T05:11:55.098102164-05:00 AUDIT: id="0113d37e-ae35-497c-b8bf-1c1661dca2f9" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder"
I0125 05:11:55.099251 4678 audit.go:125] 2017-01-25T05:11:55.099167774-05:00 AUDIT: id="f7194046-cad2-4fac-9029-a4bc3fb660e1" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-73vff"
I0125 05:11:55.101169 4678 audit.go:45] 2017-01-25T05:11:55.101155569-05:00 AUDIT: id="3cce5168-1dd7-40f8-a1e4-f25ac45d1b57" response="201"
I0125 05:11:55.101984 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (7.664527ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.102260 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts?fieldSelector=metadata.name%3Ddefault: (36.162453ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940]
I0125 05:11:55.103973 4678 audit.go:125] 2017-01-25T05:11:55.103940016-05:00 AUDIT: id="35220062-674d-4362-8ec1-6c1fd1f25a33" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder"
I0125 05:11:55.105053 4678 audit.go:45] 2017-01-25T05:11:55.105039508-05:00 AUDIT: id="0113d37e-ae35-497c-b8bf-1c1661dca2f9" response="200"
I0125 05:11:55.105107 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder: (7.233785ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.107287 4678 store.go:283] GuaranteedUpdate of kubernetes.io/serviceaccounts/extended-test-postgresql-replication-0-bwll6-pnjps/builder failed because of a conflict, going to retry
I0125 05:11:55.107403 4678 audit.go:45] 2017-01-25T05:11:55.107390348-05:00 AUDIT: id="35220062-674d-4362-8ec1-6c1fd1f25a33" response="409"
I0125 05:11:55.107449 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder: (3.750863ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.107865 4678 panics.go:76] GET /oauth/authorize?response_type=token&client_id=openshift-challenging-client: (1.317724ms) 401 [[Go-http-client/2.0] 172.18.7.222:50940]
I0125 05:11:55.109700 4678 audit.go:45] 2017-01-25T05:11:55.109685567-05:00 AUDIT: id="f7194046-cad2-4fac-9029-a4bc3fb660e1" response="200"
I0125 05:11:55.109743 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-73vff: (10.798572ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.112408 4678 audit.go:125] 2017-01-25T05:11:55.112369562-05:00 AUDIT: id="cc926b18-93e7-4536-8afe-c232671e1b33" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default"
I0125 05:11:55.113171 4678 audit.go:125] 2017-01-25T05:11:55.113138759-05:00 AUDIT: id="a396aefc-29b0-4bfa-857e-8f88abe5fee0" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg"
I0125 05:11:55.113876 4678 audit.go:125] 2017-01-25T05:11:55.11384332-05:00 AUDIT: id="5a09221f-9c43-4928-bc77-161e265faac2" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default"
I0125 05:11:55.115683 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats
I0125 05:11:55.115698 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats
I0125 05:11:55.116347 4678 audit.go:45] 2017-01-25T05:11:55.116331702-05:00 AUDIT: id="a396aefc-29b0-4bfa-857e-8f88abe5fee0" response="200"
I0125 05:11:55.116863 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (3.959715ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.118311 4678 audit.go:45] 2017-01-25T05:11:55.118297167-05:00 AUDIT: id="5a09221f-9c43-4928-bc77-161e265faac2" response="200"
I0125 05:11:55.118363 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (4.752532ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.118529 4678 audit.go:45] 2017-01-25T05:11:55.118515854-05:00 AUDIT: id="cc926b18-93e7-4536-8afe-c232671e1b33" response="200"
I0125 05:11:55.118573 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (6.492684ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.127287 4678 audit.go:125] 2017-01-25T05:11:55.127242245-05:00 AUDIT: id="e795db27-2373-4d06-bb1b-4a78a95cc185" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets"
I0125 05:11:55.130109 4678 audit.go:45] 2017-01-25T05:11:55.130093956-05:00 AUDIT: id="e795db27-2373-4d06-bb1b-4a78a95cc185" response="201"
I0125 05:11:55.131134 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (4.178281ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.133436 4678 audit.go:125] 2017-01-25T05:11:55.133398744-05:00 AUDIT: id="d6c07898-772e-4c7c-9ce9-1161f7690d04" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default"
I0125 05:11:55.136462 4678 audit.go:45] 2017-01-25T05:11:55.136446974-05:00 AUDIT: id="d6c07898-772e-4c7c-9ce9-1161f7690d04" response="200"
I0125 05:11:55.136518 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (3.393957ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.138473 4678 anyauthpassword.go:40] Got userIdentityMapping: &user.DefaultInfo{Name:"extended-test-postgresql-replication-1-34bbd-xd4g8-user", UID:"b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094", Groups:[]string(nil), Extra:map[string][]string(nil)}
I0125 05:11:55.138499 4678 basicauth.go:45] Login with provider "anypassword" succeeded for login "extended-test-postgresql-replication-1-34bbd-xd4g8-user": &user.DefaultInfo{Name:"extended-test-postgresql-replication-1-34bbd-xd4g8-user", UID:"b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094", Groups:[]string(nil), Extra:map[string][]string(nil)}
I0125 05:11:55.138517 4678 authenticator.go:38] OAuth authentication succeeded: &user.DefaultInfo{Name:"extended-test-postgresql-replication-1-34bbd-xd4g8-user", UID:"b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094", Groups:[]string(nil), Extra:map[string][]string(nil)}
I0125 05:11:55.138999 4678 audit.go:125] 2017-01-25T05:11:55.138963888-05:00 AUDIT: id="a56686e9-ec6f-412d-b85a-75f27c2341d6" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder"
I0125 05:11:55.141774 4678 audit.go:45] 2017-01-25T05:11:55.141758877-05:00 AUDIT: id="a56686e9-ec6f-412d-b85a-75f27c2341d6" response="200"
I0125 05:11:55.141826 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder: (3.117343ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:11:55.145297 4678 panics.go:76] GET /oauth/authorize?response_type=token&client_id=openshift-challenging-client: (28.737895ms) 302
goroutine 1567254 [running]:
github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog.(*respLogger).recordStatus(0xc42b924460, 0x12e)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog/log.go:219 +0xbb
github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog.(*respLogger).WriteHeader(0xc42b924460, 0x12e)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog/log.go:198 +0x35
github.com/openshift/origin/vendor/github.com/RangelReale/osin.OutputJSON(0xc43004ed80, 0x9292920, 0xc42b924460, 0xc427117860, 0xc42b924460, 0x1065900)
    /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/github.com/RangelReale/osin/response_json.go:24 +0x23e
github.com/openshift/origin/pkg/oauth/server/osinserver.(*Server).handleAuthorize(0xc4224cd280, 0x9292920, 0xc42b924460, 0xc427117860)
/go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/oauth/server/osinserver/osinserver.go:101 +0x106 github.com/openshift/origin/pkg/oauth/server/osinserver.(*Server).(github.com/openshift/origin/pkg/oauth/server/osinserver.handleAuthorize)-fm(0x9292920, 0xc42b924460, 0xc427117860) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/oauth/server/osinserver/osinserver.go:47 +0x48 net/http.HandlerFunc.ServeHTTP(0xc421d08bb0, 0x9292920, 0xc42b924460, 0xc427117860) /usr/local/go/src/net/http/server.go:1726 +0x44 github.com/openshift/origin/vendor/github.com/gorilla/context.ClearHandler.func1(0x9292920, 0xc42b924460, 0xc427117860) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/github.com/gorilla/context/context.go:141 +0x8b net/http.HandlerFunc.ServeHTTP(0xc42259dc80, 0x9292920, 0xc42b924460, 0xc427117860) /usr/local/go/src/net/http/server.go:1726 +0x44 net/http.(*ServeMux).ServeHTTP(0xc422450120, 0x9292920, 0xc42b924460, 0xc427117860) /usr/local/go/src/net/http/server.go:2022 +0x7f net/http.(*ServeMux).ServeHTTP(0xc42132c7b0, 0x9292920, 0xc42b924460, 0xc427117860) /usr/local/go/src/net/http/server.go:2022 +0x7f github.com/openshift/origin/pkg/cmd/server/origin.WithPatternsHandler.func1(0x9292920, 0xc42b924460, 0xc427117860) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/cmd/server/origin/master.go:946 +0xcd net/http.HandlerFunc.ServeHTTP(0xc422a6df80, 0x9292920, 0xc42b924460, 0xc427117860) /usr/local/go/src/net/http/server.go:1726 +0x44 github.com/openshift/origin/pkg/cmd/server/origin.WithAssetServerRedirect.func1(0x9292920, 0xc42b924460, 0xc427117860) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/cmd/server/origin/handlers.go:297 +0x7f net/http.HandlerFunc.ServeHTTP(0xc42132d080, 0x9292920, 0xc42b924460, 0xc427117860) /usr/local/go/src/net/http/server.go:1726 +0x44 github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.WithCORS.func1(0x9292920, 0xc42b924460, 0xc427117860) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/cors.go:77 +0x1a2 net/http.HandlerFunc.ServeHTTP(0xc421745980, 0x9292920, 0xc42b924460, 0xc427117860) /usr/local/go/src/net/http/server.go:1726 +0x44 github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.WithPanicRecovery.func1(0x9292920, 0xc42b924460, 0xc427117860) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/panics.go:75 +0x24a net/http.HandlerFunc.ServeHTTP(0xc42132d1d0, 0x7ff0a8661900, 0xc42b1fdf30, 0xc427117860) /usr/local/go/src/net/http/server.go:1726 +0x44 github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.(*timeoutHandler).ServeHTTP.func1(0xc4226e0720, 0x929ba20, 0xc42b1fdf30, 0xc427117860, 0xc43653ff20) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/timeout.go:78 +0x8d created by github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.(*timeoutHandler).ServeHTTP /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/timeout.go:80 +0x1db [[Go-http-client/2.0] 172.18.7.222:50940] I0125 05:11:55.181862 
4678 audit.go:125] 2017-01-25T05:11:55.181816718-05:00 AUDIT: id="35c18f15-02e6-441b-b3fa-bc5b8df9388d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:11:55.182917 4678 audit.go:45] 2017-01-25T05:11:55.182902808-05:00 AUDIT: id="35c18f15-02e6-441b-b3fa-bc5b8df9388d" response="200" I0125 05:11:55.182996 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.447999ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.188068 4678 audit.go:125] 2017-01-25T05:11:55.188034273-05:00 AUDIT: id="b73597f9-29c9-4f54-918c-e3dfcc5a850d" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="" uri="/oapi/v1/users/~" I0125 05:11:55.189033 4678 audit.go:45] 2017-01-25T05:11:55.189019119-05:00 AUDIT: id="b73597f9-29c9-4f54-918c-e3dfcc5a850d" response="200" I0125 05:11:55.189120 4678 panics.go:76] GET /oapi/v1/users/~: (2.441317ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.191696 4678 audit.go:125] 2017-01-25T05:11:55.191664562-05:00 AUDIT: id="a3abb0f1-17d3-410f-85ae-d3cd6aefe6e6" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="" uri="/oapi/v1/users/~" I0125 05:11:55.192590 4678 audit.go:45] 2017-01-25T05:11:55.192575459-05:00 AUDIT: id="a3abb0f1-17d3-410f-85ae-d3cd6aefe6e6" response="200" I0125 05:11:55.192672 4678 panics.go:76] GET /oapi/v1/users/~: (2.373278ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.197727 4678 audit.go:125] 2017-01-25T05:11:55.197692799-05:00 AUDIT: id="21583780-0361-4f8f-8589-bd9fc9cab8b1" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="" uri="/oapi/v1/projectrequests" I0125 05:11:55.198855 4678 audit.go:125] 2017-01-25T05:11:55.198822458-05:00 AUDIT: id="1b97e1ac-b77c-494f-bd5c-54ad799af227" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/projects/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:11:55.199530 4678 audit.go:125] 2017-01-25T05:11:55.199500796-05:00 AUDIT: id="aa372718-2bc3-449a-ba28-608fc2f779b3" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:11:55.200408 4678 audit.go:45] 2017-01-25T05:11:55.200395034-05:00 AUDIT: id="aa372718-2bc3-449a-ba28-608fc2f779b3" response="404" I0125 05:11:55.200464 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.187867ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.200729 4678 audit.go:45] 2017-01-25T05:11:55.200717729-05:00 AUDIT: id="1b97e1ac-b77c-494f-bd5c-54ad799af227" response="404" I0125 05:11:55.200764 4678 panics.go:76] GET /oapi/v1/projects/extended-test-postgresql-replication-1-34bbd-xd4g8: (2.199223ms) 404 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.201749 4678 audit.go:125] 2017-01-25T05:11:55.201723305-05:00 AUDIT: id="50600b4f-06df-4b2a-8c66-5484a7b41033" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" 
namespace="default" uri="/oapi/v1/namespaces/default/processedtemplates" I0125 05:11:55.202898 4678 audit.go:45] 2017-01-25T05:11:55.202885339-05:00 AUDIT: id="50600b4f-06df-4b2a-8c66-5484a7b41033" response="201" I0125 05:11:55.203104 4678 panics.go:76] POST /oapi/v1/namespaces/default/processedtemplates: (1.613783ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.204241 4678 audit.go:125] 2017-01-25T05:11:55.204215047-05:00 AUDIT: id="b72dad57-0a12-4768-a7dd-93e7100dac03" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/projects" I0125 05:11:55.205215 4678 audit.go:125] 2017-01-25T05:11:55.205174478-05:00 AUDIT: id="8fe0bdb4-57ff-4749-9242-37782b7082fd" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/namespaces" I0125 05:11:55.212153 4678 audit.go:45] 2017-01-25T05:11:55.212131116-05:00 AUDIT: id="8fe0bdb4-57ff-4749-9242-37782b7082fd" response="201" I0125 05:11:55.212247 4678 panics.go:76] POST /api/v1/namespaces: (7.298113ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.213336 4678 audit.go:45] 2017-01-25T05:11:55.213322867-05:00 AUDIT: id="b72dad57-0a12-4768-a7dd-93e7100dac03" response="201" I0125 05:11:55.213391 4678 panics.go:76] POST /oapi/v1/projects: (9.423715ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.214503 4678 audit.go:125] 2017-01-25T05:11:55.214468346-05:00 AUDIT: id="09201830-a342-4ff6-b801-e7e7198afafd" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts" I0125 05:11:55.216763 4678 audit.go:125] 2017-01-25T05:11:55.216730714-05:00 AUDIT: id="2c23c137-c811-470d-91b7-45044ec8b7fb" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/rolebindings" I0125 05:11:55.218551 4678 audit.go:45] 2017-01-25T05:11:55.218537261-05:00 AUDIT: id="09201830-a342-4ff6-b801-e7e7198afafd" response="201" I0125 05:11:55.218602 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts: (4.420819ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.219469 4678 audit.go:125] 2017-01-25T05:11:55.219432255-05:00 AUDIT: id="0f962ae4-fa55-4b49-8c0c-fc028cabe87c" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default" I0125 05:11:55.220675 4678 audit.go:125] 2017-01-25T05:11:55.220643225-05:00 AUDIT: id="2f953446-ff93-44b1-8f02-6c9b86dee9dc" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:11:55.221637 4678 audit.go:125] 2017-01-25T05:11:55.221605084-05:00 AUDIT: id="981ad0e1-27c3-4c16-908a-d611fd354c98" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" 
namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts" I0125 05:11:55.221962 4678 audit.go:45] 2017-01-25T05:11:55.221948459-05:00 AUDIT: id="0f962ae4-fa55-4b49-8c0c-fc028cabe87c" response="200" I0125 05:11:55.222009 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default: (2.829136ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.222443 4678 audit.go:125] 2017-01-25T05:11:55.222411103-05:00 AUDIT: id="7fcb57cf-7f01-4fc2-82d7-cb8a762bee61" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default" I0125 05:11:55.223032 4678 create_dockercfg_secrets.go:459] Creating token secret "default-token-dzq89" for service account extended-test-postgresql-replication-1-34bbd-xd4g8/default I0125 05:11:55.223611 4678 audit.go:45] 2017-01-25T05:11:55.223597356-05:00 AUDIT: id="2f953446-ff93-44b1-8f02-6c9b86dee9dc" response="200" I0125 05:11:55.223660 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (3.246718ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.223793 4678 audit.go:125] 2017-01-25T05:11:55.223763203-05:00 AUDIT: id="79f796bd-da47-4da3-9496-329bad70842f" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.225167 4678 audit.go:45] 2017-01-25T05:11:55.225154788-05:00 AUDIT: id="7fcb57cf-7f01-4fc2-82d7-cb8a762bee61" response="200" I0125 05:11:55.225232 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default: (3.065065ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.225394 4678 audit.go:45] 2017-01-25T05:11:55.225380109-05:00 AUDIT: id="981ad0e1-27c3-4c16-908a-d611fd354c98" response="201" I0125 05:11:55.225436 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts: (4.064488ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.226393 4678 audit.go:125] 2017-01-25T05:11:55.22636231-05:00 AUDIT: id="db4ef836-835b-43d8-a7de-3c3bf3654dec" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/resourcequotas" I0125 05:11:55.228164 4678 tokens_controller.go:401] serviceaccount extended-test-postgresql-replication-1-34bbd-xd4g8/default is not up to date, skipping token creation I0125 05:11:55.229144 4678 audit.go:125] 2017-01-25T05:11:55.229112751-05:00 AUDIT: id="d5988a4d-e81d-481e-ace4-6ec9598b64b9" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder" I0125 05:11:55.229791 4678 audit.go:125] 2017-01-25T05:11:55.229761226-05:00 AUDIT: 
id="b90ab194-a0cd-4c90-a1ba-6426ab217f26" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder" I0125 05:11:55.230730 4678 audit.go:125] 2017-01-25T05:11:55.230696155-05:00 AUDIT: id="c9084781-5222-484d-87da-1b231e46f508" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default" I0125 05:11:55.231345 4678 audit.go:125] 2017-01-25T05:11:55.23131253-05:00 AUDIT: id="f827043a-75f8-419e-a0b3-da271370180c" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts" I0125 05:11:55.232510 4678 audit.go:45] 2017-01-25T05:11:55.232495037-05:00 AUDIT: id="2c23c137-c811-470d-91b7-45044ec8b7fb" response="201" I0125 05:11:55.232572 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/rolebindings: (16.078829ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.233373 4678 audit.go:45] 2017-01-25T05:11:55.23335977-05:00 AUDIT: id="db4ef836-835b-43d8-a7de-3c3bf3654dec" response="200" I0125 05:11:55.233442 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/resourcequotas: (7.346689ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.235828 4678 audit.go:125] 2017-01-25T05:11:55.235792063-05:00 AUDIT: id="e63567d2-68b8-4646-92ee-4f4e973cbfae" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/rolebindings" I0125 05:11:55.237557 4678 audit.go:45] 2017-01-25T05:11:55.237543235-05:00 AUDIT: id="d5988a4d-e81d-481e-ace4-6ec9598b64b9" response="200" I0125 05:11:55.237611 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder: (8.731805ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.245422 4678 audit.go:125] 2017-01-25T05:11:55.24537522-05:00 AUDIT: id="1d633a8e-45bb-477b-85d9-79101bdbf7c5" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.255303 4678 generic.go:145] GenericPLEG: 99f51a19-e2e6-11e6-a4b0-0e6a5cbf0094/3898d0a93eeac7342b1d1f67b3451372ea7edf63c3311b6132453843666e822f: exited -> non-existent I0125 05:11:55.255333 4678 generic.go:145] GenericPLEG: 99f51a19-e2e6-11e6-a4b0-0e6a5cbf0094/f29a22668a071e0d1e50ece55052402a83e4fee1353aff2077cc71982b710a21: exited -> unknown I0125 05:11:55.255351 4678 generic.go:145] GenericPLEG: 6cfb6d02-e2e6-11e6-a4b0-0e6a5cbf0094/ce2d6cc94bf98b5b7784a2c2b9d257512ef4ef0ec7029821c05a57dfa14d2675: unknown -> non-existent I0125 05:11:55.260507 4678 audit.go:45] 2017-01-25T05:11:55.260481331-05:00 AUDIT: id="79f796bd-da47-4da3-9496-329bad70842f" response="201" I0125 05:11:55.260587 4678 
panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (37.049176ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.261338 4678 audit.go:45] 2017-01-25T05:11:55.261324622-05:00 AUDIT: id="b90ab194-a0cd-4c90-a1ba-6426ab217f26" response="200" I0125 05:11:55.261390 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder: (31.858927ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.261574 4678 audit.go:45] 2017-01-25T05:11:55.261562541-05:00 AUDIT: id="1d633a8e-45bb-477b-85d9-79101bdbf7c5" response="201" I0125 05:11:55.261827 4678 audit.go:45] 2017-01-25T05:11:55.261814663-05:00 AUDIT: id="c9084781-5222-484d-87da-1b231e46f508" response="200" I0125 05:11:55.261869 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default: (31.409367ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.262048 4678 audit.go:45] 2017-01-25T05:11:55.262036791-05:00 AUDIT: id="f827043a-75f8-419e-a0b3-da271370180c" response="201" I0125 05:11:55.262094 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts: (31.043038ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.262596 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (17.522627ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.264700 4678 create_dockercfg_secrets.go:459] Creating token secret "default-token-dzq89" for service account extended-test-postgresql-replication-1-34bbd-xd4g8/default I0125 05:11:55.264929 4678 create_dockercfg_secrets.go:459] Creating token secret "builder-token-004fh" for service account extended-test-postgresql-replication-1-34bbd-xd4g8/builder I0125 05:11:55.267959 4678 audit.go:125] 2017-01-25T05:11:55.267919298-05:00 AUDIT: id="0b435fee-4f98-4a2c-b302-a945e6f15caf" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.269105 4678 audit.go:125] 2017-01-25T05:11:55.26907216-05:00 AUDIT: id="87c64c32-28ec-4962-b2e0-748fcea5967e" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.269676 4678 audit.go:125] 2017-01-25T05:11:55.269647184-05:00 AUDIT: id="972f4eaf-306a-40f5-948e-83aeb2d7c033" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder" I0125 05:11:55.270054 4678 audit.go:45] 2017-01-25T05:11:55.270041297-05:00 AUDIT: id="972f4eaf-306a-40f5-948e-83aeb2d7c033" response="409" I0125 05:11:55.270108 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder: (694.64µs) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.270759 4678 
tokens_controller.go:448] deleting secret extended-test-postgresql-replication-1-34bbd-xd4g8/builder-token-j9z46 because reference couldn't be added (Operation cannot be fulfilled on serviceaccounts "builder": the object has been modified; please apply your changes to the latest version and try again) I0125 05:11:55.271655 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (57.943554ms) I0125 05:11:55.272531 4678 audit.go:45] 2017-01-25T05:11:55.272517274-05:00 AUDIT: id="87c64c32-28ec-4962-b2e0-748fcea5967e" response="201" I0125 05:11:55.272579 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (3.737474ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.274300 4678 audit.go:125] 2017-01-25T05:11:55.274268841-05:00 AUDIT: id="18db0873-856d-4564-a316-9f6ddc4ca63b" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.275296 4678 audit.go:125] 2017-01-25T05:11:55.275265229-05:00 AUDIT: id="21978cfe-7696-4be4-99f6-fcba1eff2f72" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts" I0125 05:11:55.276090 4678 audit.go:125] 2017-01-25T05:11:55.276053966-05:00 AUDIT: id="ab1e6886-c991-436f-9dee-80dc19b9cd09" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer" I0125 05:11:55.276734 4678 audit.go:125] 2017-01-25T05:11:55.276700741-05:00 AUDIT: id="57299175-3bd0-45d7-833e-9279ff2044e3" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-dzq89" I0125 05:11:55.277379 4678 audit.go:125] 2017-01-25T05:11:55.27734234-05:00 AUDIT: id="649c406e-aca0-4016-aa0b-a531e2cc5c7d" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer" I0125 05:11:55.277842 4678 audit.go:125] 2017-01-25T05:11:55.277812371-05:00 AUDIT: id="cb5cac01-056a-4512-af55-caa2cecd0c63" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-j9z46" I0125 05:11:55.282172 4678 audit.go:45] 2017-01-25T05:11:55.282156429-05:00 AUDIT: id="ab1e6886-c991-436f-9dee-80dc19b9cd09" response="200" I0125 05:11:55.282242 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer: (6.404034ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.282410 4678 audit.go:45] 2017-01-25T05:11:55.282399208-05:00 AUDIT: id="649c406e-aca0-4016-aa0b-a531e2cc5c7d" response="200" I0125 
05:11:55.282452 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer: (5.363011ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.290498 4678 audit.go:45] 2017-01-25T05:11:55.290473273-05:00 AUDIT: id="e63567d2-68b8-4646-92ee-4f4e973cbfae" response="201" I0125 05:11:55.290587 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/rolebindings: (55.009327ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.291131 4678 audit.go:45] 2017-01-25T05:11:55.291118014-05:00 AUDIT: id="21978cfe-7696-4be4-99f6-fcba1eff2f72" response="409" I0125 05:11:55.291177 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts: (16.163892ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.294430 4678 create_dockercfg_secrets.go:459] Creating token secret "deployer-token-1ctp4" for service account extended-test-postgresql-replication-1-34bbd-xd4g8/deployer I0125 05:11:55.296825 4678 audit.go:45] 2017-01-25T05:11:55.29680723-05:00 AUDIT: id="57299175-3bd0-45d7-833e-9279ff2044e3" response="200" I0125 05:11:55.296886 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-dzq89: (20.415309ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.297062 4678 audit.go:45] 2017-01-25T05:11:55.297048732-05:00 AUDIT: id="0b435fee-4f98-4a2c-b302-a945e6f15caf" response="409" I0125 05:11:55.297104 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (29.458114ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.298298 4678 audit.go:125] 2017-01-25T05:11:55.298262313-05:00 AUDIT: id="3072cee9-9b24-4ece-8ac4-34355bfb3754" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.299468 4678 audit.go:125] 2017-01-25T05:11:55.299437117-05:00 AUDIT: id="faf22b01-55f7-47ea-bcaa-cf0bdc153cc7" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.300365 4678 audit.go:125] 2017-01-25T05:11:55.300333733-05:00 AUDIT: id="cd1f1a97-2dd0-4cb8-89af-b736df5ce0b1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-004fh" I0125 05:11:55.303163 4678 audit.go:45] 2017-01-25T05:11:55.303148928-05:00 AUDIT: id="3072cee9-9b24-4ece-8ac4-34355bfb3754" response="201" I0125 05:11:55.303299 4678 audit.go:45] 2017-01-25T05:11:55.303288673-05:00 AUDIT: id="18db0873-856d-4564-a316-9f6ddc4ca63b" response="201" I0125 05:11:55.303458 4678 audit.go:45] 2017-01-25T05:11:55.303447568-05:00 AUDIT: id="cd1f1a97-2dd0-4cb8-89af-b736df5ce0b1" response="200" I0125 05:11:55.303500 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-004fh: (3.42939ms) 
200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.303713 4678 audit.go:45] 2017-01-25T05:11:55.303701378-05:00 AUDIT: id="faf22b01-55f7-47ea-bcaa-cf0bdc153cc7" response="201" I0125 05:11:55.303753 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (4.586363ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.307510 4678 audit.go:45] 2017-01-25T05:11:55.307495189-05:00 AUDIT: id="cb5cac01-056a-4512-af55-caa2cecd0c63" response="200" I0125 05:11:55.307558 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-j9z46: (29.970572ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.308448 4678 audit.go:125] 2017-01-25T05:11:55.308411717-05:00 AUDIT: id="08c56b65-5dca-45b8-b460-8d67ff468680" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/rolebindings" I0125 05:11:55.310546 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (12.588228ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.310611 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (36.590737ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.311481 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (39.794918ms) I0125 05:11:55.326080 4678 create_dockercfg_secrets.go:459] Creating token secret "deployer-token-1ctp4" for service account extended-test-postgresql-replication-1-34bbd-xd4g8/deployer I0125 05:11:55.328430 4678 audit.go:125] 2017-01-25T05:11:55.328386679-05:00 AUDIT: id="e560428e-9b18-4745-8ba5-a68f59e23b71" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder" I0125 05:11:55.329024 4678 audit.go:125] 2017-01-25T05:11:55.329000785-05:00 AUDIT: id="25d4fb84-4de0-4242-95d0-7e2f74587b23" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-004fh" I0125 05:11:55.330054 4678 audit.go:125] 2017-01-25T05:11:55.330022728-05:00 AUDIT: id="32fc8549-1b08-4fa2-989e-082a5f6daeed" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer" I0125 05:11:55.330471 4678 audit.go:45] 2017-01-25T05:11:55.330457446-05:00 AUDIT: id="32fc8549-1b08-4fa2-989e-082a5f6daeed" response="409" I0125 05:11:55.330526 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer: (736.158µs) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.330893 4678 audit.go:125] 2017-01-25T05:11:55.330863198-05:00 
AUDIT: id="e7e6b56b-704c-472b-a26c-f1359d0b22d7" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default" I0125 05:11:55.331414 4678 audit.go:125] 2017-01-25T05:11:55.331385319-05:00 AUDIT: id="948edf13-8dce-425a-9dfa-b943d118f235" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.331925 4678 audit.go:125] 2017-01-25T05:11:55.331895784-05:00 AUDIT: id="c4102e29-f8f7-4996-8f81-d4cc8a8ba018" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:11:55.332613 4678 audit.go:125] 2017-01-25T05:11:55.332583612-05:00 AUDIT: id="02364b10-f526-4821-9bf1-ae4c9598e1ce" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-1ctp4" I0125 05:11:55.334316 4678 audit.go:125] 2017-01-25T05:11:55.334283696-05:00 AUDIT: id="3c0c335c-fbe3-4b33-b498-d6ef5380d0f2" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder" I0125 05:11:55.334732 4678 tokens_controller.go:448] deleting secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-0389q because reference couldn't be added (Operation cannot be fulfilled on serviceaccounts "deployer": the object has been modified; please apply your changes to the latest version and try again) I0125 05:11:55.335278 4678 audit.go:125] 2017-01-25T05:11:55.335242568-05:00 AUDIT: id="4143beba-3b0b-4aa9-94da-16466456e5d9" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-dzq89" I0125 05:11:55.335472 4678 audit.go:45] 2017-01-25T05:11:55.335458909-05:00 AUDIT: id="25d4fb84-4de0-4242-95d0-7e2f74587b23" response="200" I0125 05:11:55.335830 4678 audit.go:45] 2017-01-25T05:11:55.335816985-05:00 AUDIT: id="c4102e29-f8f7-4996-8f81-d4cc8a8ba018" response="200" I0125 05:11:55.335885 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (4.187682ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.336350 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-004fh: (7.49861ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.337431 4678 audit.go:125] 2017-01-25T05:11:55.33740028-05:00 AUDIT: id="51b35972-0106-400b-b4e7-ea06a1717e2c" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-0389q" I0125 05:11:55.338887 4678 audit.go:45] 2017-01-25T05:11:55.33887297-05:00 AUDIT: id="e560428e-9b18-4745-8ba5-a68f59e23b71" response="200" I0125 05:11:55.338940 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder: (10.763812ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.339111 4678 audit.go:45] 2017-01-25T05:11:55.339100605-05:00 AUDIT: id="e7e6b56b-704c-472b-a26c-f1359d0b22d7" response="200" I0125 05:11:55.339155 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default: (8.528018ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.339872 4678 audit.go:45] 2017-01-25T05:11:55.339859128-05:00 AUDIT: id="3c0c335c-fbe3-4b33-b498-d6ef5380d0f2" response="200" I0125 05:11:55.339916 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder: (5.873733ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.340626 4678 audit.go:45] 2017-01-25T05:11:55.340613409-05:00 AUDIT: id="08c56b65-5dca-45b8-b460-8d67ff468680" response="201" I0125 05:11:55.340687 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/rolebindings: (32.55079ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.343174 4678 audit.go:45] 2017-01-25T05:11:55.343161431-05:00 AUDIT: id="02364b10-f526-4821-9bf1-ae4c9598e1ce" response="200" I0125 05:11:55.343238 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-1ctp4: (10.874031ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.343644 4678 audit.go:45] 2017-01-25T05:11:55.34363129-05:00 AUDIT: id="4143beba-3b0b-4aa9-94da-16466456e5d9" response="200" I0125 05:11:55.344563 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-dzq89: (9.561815ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.350112 4678 create_dockercfg_secrets.go:496] Creating dockercfg secret "builder-dockercfg-l993x" for service account extended-test-postgresql-replication-1-34bbd-xd4g8/builder I0125 05:11:55.353497 4678 audit.go:125] 2017-01-25T05:11:55.353454243-05:00 AUDIT: id="5a2121fb-9cb0-4ae5-81e7-1282aaa17419" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.353520 4678 create_dockercfg_secrets.go:496] Creating dockercfg secret "default-dockercfg-03n02" for service account extended-test-postgresql-replication-1-34bbd-xd4g8/default I0125 05:11:55.353770 4678 audit.go:45] 2017-01-25T05:11:55.3537574-05:00 AUDIT: id="948edf13-8dce-425a-9dfa-b943d118f235" response="409" I0125 05:11:55.353825 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (22.675815ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.354174 4678 audit.go:125] 2017-01-25T05:11:55.354142126-05:00 AUDIT: 
id="280bd7f6-4aaa-4d41-87f1-38f7e112ff7a" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/rolebindings" I0125 05:11:55.354828 4678 audit.go:125] 2017-01-25T05:11:55.354796642-05:00 AUDIT: id="040702b4-e17a-4c49-a712-e9be0ecf0256" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-1ctp4" I0125 05:11:55.357863 4678 audit.go:125] 2017-01-25T05:11:55.357829015-05:00 AUDIT: id="c2987996-675a-4c5f-b78a-3ea9e16f9204" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.359534 4678 audit.go:45] 2017-01-25T05:11:55.359518692-05:00 AUDIT: id="c2987996-675a-4c5f-b78a-3ea9e16f9204" response="201" I0125 05:11:55.359645 4678 audit.go:45] 2017-01-25T05:11:55.359635082-05:00 AUDIT: id="040702b4-e17a-4c49-a712-e9be0ecf0256" response="200" I0125 05:11:55.359759 4678 audit.go:45] 2017-01-25T05:11:55.359749127-05:00 AUDIT: id="51b35972-0106-400b-b4e7-ea06a1717e2c" response="200" I0125 05:11:55.359805 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-0389q: (22.625444ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.359981 4678 audit.go:45] 2017-01-25T05:11:55.359969604-05:00 AUDIT: id="5a2121fb-9cb0-4ae5-81e7-1282aaa17419" response="201" I0125 05:11:55.362741 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (5.138642ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.362808 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (9.733888ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.362867 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-1ctp4: (8.29731ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.366251 4678 create_dockercfg_secrets.go:496] Creating dockercfg secret "deployer-dockercfg-4rhpp" for service account extended-test-postgresql-replication-1-34bbd-xd4g8/deployer I0125 05:11:55.367559 4678 audit.go:125] 2017-01-25T05:11:55.367516409-05:00 AUDIT: id="6dadbe62-3f57-46c8-b426-7952211dac7f" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder" I0125 05:11:55.368320 4678 audit.go:125] 2017-01-25T05:11:55.36828883-05:00 AUDIT: id="2a9cc170-ff81-4714-a1ca-00c5144c67e5" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder" I0125 05:11:55.368834 4678 audit.go:125] 2017-01-25T05:11:55.368804942-05:00 AUDIT: id="6b7199ad-d1e9-4dbb-9b83-52757bb6cf35" 
ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer" I0125 05:11:55.369461 4678 audit.go:125] 2017-01-25T05:11:55.369431252-05:00 AUDIT: id="78293c68-a27e-410b-bd55-5db64fab458a" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.369984 4678 audit.go:125] 2017-01-25T05:11:55.369953684-05:00 AUDIT: id="66e82ae3-dfe4-4679-9cdb-6e0727ac4f65" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:11:55.370655 4678 audit.go:125] 2017-01-25T05:11:55.370623377-05:00 AUDIT: id="848a2470-1439-407b-9bc8-a81a2320506b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer" I0125 05:11:55.375648 4678 audit.go:45] 2017-01-25T05:11:55.375630212-05:00 AUDIT: id="2a9cc170-ff81-4714-a1ca-00c5144c67e5" response="200" I0125 05:11:55.375714 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder: (7.666703ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.375894 4678 audit.go:45] 2017-01-25T05:11:55.375877406-05:00 AUDIT: id="6b7199ad-d1e9-4dbb-9b83-52757bb6cf35" response="200" I0125 05:11:55.375940 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer: (7.355787ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.376104 4678 audit.go:45] 2017-01-25T05:11:55.376091671-05:00 AUDIT: id="848a2470-1439-407b-9bc8-a81a2320506b" response="200" I0125 05:11:55.376146 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer: (5.746172ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.376282 4678 store.go:283] GuaranteedUpdate of kubernetes.io/serviceaccounts/extended-test-postgresql-replication-1-34bbd-xd4g8/builder failed because of a conflict, going to retry I0125 05:11:55.376404 4678 audit.go:45] 2017-01-25T05:11:55.376393134-05:00 AUDIT: id="6dadbe62-3f57-46c8-b426-7952211dac7f" response="409" I0125 05:11:55.376447 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder: (9.281732ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.377621 4678 audit.go:45] 2017-01-25T05:11:55.377604074-05:00 AUDIT: id="66e82ae3-dfe4-4679-9cdb-6e0727ac4f65" response="200" I0125 05:11:55.377833 4678 audit.go:45] 2017-01-25T05:11:55.377821168-05:00 AUDIT: id="78293c68-a27e-410b-bd55-5db64fab458a" response="201" I0125 05:11:55.378838 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (9.101945ms) 200 
[[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.378908 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (9.713696ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.388135 4678 audit.go:125] 2017-01-25T05:11:55.388091523-05:00 AUDIT: id="ebeb092d-46cb-4eda-9c5f-137e86f05167" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.389465 4678 audit.go:125] 2017-01-25T05:11:55.389431535-05:00 AUDIT: id="5c703568-8c12-44c1-8ae5-d74bdcb99f5b" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default" I0125 05:11:55.390425 4678 audit.go:125] 2017-01-25T05:11:55.390390753-05:00 AUDIT: id="ef1fb7e5-7dd6-4815-89f4-3e05b7052044" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:55.394495 4678 audit.go:45] 2017-01-25T05:11:55.394479734-05:00 AUDIT: id="ebeb092d-46cb-4eda-9c5f-137e86f05167" response="201" I0125 05:11:55.394676 4678 audit.go:45] 2017-01-25T05:11:55.394664556-05:00 AUDIT: id="ef1fb7e5-7dd6-4815-89f4-3e05b7052044" response="201" I0125 05:11:55.395161 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (7.309746ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.395252 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (5.122602ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.396732 4678 audit.go:45] 2017-01-25T05:11:55.396715233-05:00 AUDIT: id="5c703568-8c12-44c1-8ae5-d74bdcb99f5b" response="200" I0125 05:11:55.396787 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default: (7.62129ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.399099 4678 audit.go:125] 2017-01-25T05:11:55.399058071-05:00 AUDIT: id="f59a5a07-3dff-4634-b0e0-488ac727a0c2" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer" I0125 05:11:55.400223 4678 audit.go:125] 2017-01-25T05:11:55.400173418-05:00 AUDIT: id="51428985-81ec-4161-b19b-16dd07d576a8" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer" I0125 05:11:55.402319 4678 audit.go:45] 2017-01-25T05:11:55.402305329-05:00 AUDIT: id="f59a5a07-3dff-4634-b0e0-488ac727a0c2" response="200" I0125 05:11:55.402380 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer: (3.597681ms) 200 [[openshift/v1.5.2+43a9be4 
(linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.402521 4678 store.go:283] GuaranteedUpdate of kubernetes.io/serviceaccounts/extended-test-postgresql-replication-1-34bbd-xd4g8/deployer failed because of a conflict, going to retry I0125 05:11:55.402629 4678 audit.go:45] 2017-01-25T05:11:55.402617616-05:00 AUDIT: id="51428985-81ec-4161-b19b-16dd07d576a8" response="409" I0125 05:11:55.402673 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer: (2.742824ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.404174 4678 audit.go:45] 2017-01-25T05:11:55.404164593-05:00 AUDIT: id="280bd7f6-4aaa-4d41-87f1-38f7e112ff7a" response="201" I0125 05:11:55.404242 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/rolebindings: (50.31409ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.405247 4678 audit.go:125] 2017-01-25T05:11:55.405193477-05:00 AUDIT: id="c8a7fe4a-f925-4176-919c-144611e2f0d8" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder" I0125 05:11:55.405333 4678 audit.go:125] 2017-01-25T05:11:55.40530706-05:00 AUDIT: id="6dead781-8c54-4404-8506-5299d7345265" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/projects/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:11:55.406055 4678 audit.go:125] 2017-01-25T05:11:55.406024709-05:00 AUDIT: id="dad6bb3e-59cc-46e2-b73c-ed34a4bf8dee" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:11:55.406763 4678 audit.go:45] 2017-01-25T05:11:55.406749237-05:00 AUDIT: id="c8a7fe4a-f925-4176-919c-144611e2f0d8" response="200" I0125 05:11:55.406814 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder: (1.87087ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.406996 4678 audit.go:45] 2017-01-25T05:11:55.406983263-05:00 AUDIT: id="dad6bb3e-59cc-46e2-b73c-ed34a4bf8dee" response="200" I0125 05:11:55.407046 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.246966ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.407384 4678 audit.go:45] 2017-01-25T05:11:55.407371693-05:00 AUDIT: id="6dead781-8c54-4404-8506-5299d7345265" response="200" I0125 05:11:55.407431 4678 panics.go:76] GET /oapi/v1/projects/extended-test-postgresql-replication-1-34bbd-xd4g8: (2.368968ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.407656 4678 audit.go:45] 2017-01-25T05:11:55.407647108-05:00 AUDIT: id="21583780-0361-4f8f-8589-bd9fc9cab8b1" response="201" I0125 05:11:55.407734 4678 panics.go:76] POST /oapi/v1/projectrequests: (211.488635ms) 201 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.411262 4678 audit.go:125] 2017-01-25T05:11:55.411222468-05:00 AUDIT: id="76c986c5-fcd2-415b-a986-e589ff19bf35" ip="172.18.7.222" method="GET" 
user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:11:55.412254 4678 audit.go:45] 2017-01-25T05:11:55.412240119-05:00 AUDIT: id="76c986c5-fcd2-415b-a986-e589ff19bf35" response="200" I0125 05:11:55.412373 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (2.578488ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:11:55.415312 4678 audit.go:125] 2017-01-25T05:11:55.415278475-05:00 AUDIT: id="a3611be7-3eca-44f1-8cea-649cc388a78e" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer" I0125 05:11:55.415747 4678 audit.go:125] 2017-01-25T05:11:55.415714261-05:00 AUDIT: id="df3ae74b-70f4-44b4-87d1-a8f8401e54a0" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts?fieldSelector=metadata.name%3Ddefault" I0125 05:11:55.416127 4678 audit.go:45] 2017-01-25T05:11:55.416113837-05:00 AUDIT: id="df3ae74b-70f4-44b4-87d1-a8f8401e54a0" response="200" I0125 05:11:55.418305 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts?fieldSelector=metadata.name%3Ddefault: (2.80633ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:11:55.418703 4678 audit.go:45] 2017-01-25T05:11:55.418688309-05:00 AUDIT: id="a3611be7-3eca-44f1-8cea-649cc388a78e" response="200" I0125 05:11:55.418751 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer: (3.732255ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.425702 4678 panics.go:76] GET /oauth/authorize?response_type=token&client_id=openshift-challenging-client: (1.048351ms) 401 [[Go-http-client/2.0] 172.18.7.222:50940] I0125 05:11:55.433259 4678 anyauthpassword.go:40] Got userIdentityMapping: &user.DefaultInfo{Name:"extended-test-postgresql-replication-2-7n81h-cp7jp-user", UID:"b2f79db0-e2e6-11e6-a4b0-0e6a5cbf0094", Groups:[]string(nil), Extra:map[string][]string(nil)} I0125 05:11:55.433290 4678 basicauth.go:45] Login with provider "anypassword" succeeded for login "extended-test-postgresql-replication-2-7n81h-cp7jp-user": &user.DefaultInfo{Name:"extended-test-postgresql-replication-2-7n81h-cp7jp-user", UID:"b2f79db0-e2e6-11e6-a4b0-0e6a5cbf0094", Groups:[]string(nil), Extra:map[string][]string(nil)} I0125 05:11:55.433318 4678 authenticator.go:38] OAuth authentication succeeded: &user.DefaultInfo{Name:"extended-test-postgresql-replication-2-7n81h-cp7jp-user", UID:"b2f79db0-e2e6-11e6-a4b0-0e6a5cbf0094", Groups:[]string(nil), Extra:map[string][]string(nil)} I0125 05:11:55.437575 4678 panics.go:76] GET /oauth/authorize?response_type=token&client_id=openshift-challenging-client: (8.962702ms) 302 goroutine 1569742 [running]: github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog.(*respLogger).recordStatus(0xc434b51650, 0x12e) 
/go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog/log.go:219 +0xbb github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog.(*respLogger).WriteHeader(0xc434b51650, 0x12e) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/httplog/log.go:198 +0x35 github.com/openshift/origin/vendor/github.com/RangelReale/osin.OutputJSON(0xc4306821b0, 0x9292920, 0xc434b51650, 0xc42e81e000, 0xc434b51650, 0x1065900) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/github.com/RangelReale/osin/response_json.go:24 +0x23e github.com/openshift/origin/pkg/oauth/server/osinserver.(*Server).handleAuthorize(0xc4224cd280, 0x9292920, 0xc434b51650, 0xc42e81e000) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/oauth/server/osinserver/osinserver.go:101 +0x106 github.com/openshift/origin/pkg/oauth/server/osinserver.(*Server).(github.com/openshift/origin/pkg/oauth/server/osinserver.handleAuthorize)-fm(0x9292920, 0xc434b51650, 0xc42e81e000) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/oauth/server/osinserver/osinserver.go:47 +0x48 net/http.HandlerFunc.ServeHTTP(0xc421d08bb0, 0x9292920, 0xc434b51650, 0xc42e81e000) /usr/local/go/src/net/http/server.go:1726 +0x44 github.com/openshift/origin/vendor/github.com/gorilla/context.ClearHandler.func1(0x9292920, 0xc434b51650, 0xc42e81e000) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/github.com/gorilla/context/context.go:141 +0x8b net/http.HandlerFunc.ServeHTTP(0xc42259dc80, 0x9292920, 0xc434b51650, 0xc42e81e000) /usr/local/go/src/net/http/server.go:1726 +0x44 net/http.(*ServeMux).ServeHTTP(0xc422450120, 0x9292920, 0xc434b51650, 0xc42e81e000) /usr/local/go/src/net/http/server.go:2022 +0x7f net/http.(*ServeMux).ServeHTTP(0xc42132c7b0, 0x9292920, 0xc434b51650, 0xc42e81e000) /usr/local/go/src/net/http/server.go:2022 +0x7f github.com/openshift/origin/pkg/cmd/server/origin.WithPatternsHandler.func1(0x9292920, 0xc434b51650, 0xc42e81e000) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/cmd/server/origin/master.go:946 +0xcd net/http.HandlerFunc.ServeHTTP(0xc422a6df80, 0x9292920, 0xc434b51650, 0xc42e81e000) /usr/local/go/src/net/http/server.go:1726 +0x44 github.com/openshift/origin/pkg/cmd/server/origin.WithAssetServerRedirect.func1(0x9292920, 0xc434b51650, 0xc42e81e000) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/pkg/cmd/server/origin/handlers.go:297 +0x7f net/http.HandlerFunc.ServeHTTP(0xc42132d080, 0x9292920, 0xc434b51650, 0xc42e81e000) /usr/local/go/src/net/http/server.go:1726 +0x44 github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.WithCORS.func1(0x9292920, 0xc434b51650, 0xc42e81e000) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/cors.go:77 +0x1a2 net/http.HandlerFunc.ServeHTTP(0xc421745980, 0x9292920, 0xc434b51650, 0xc42e81e000) /usr/local/go/src/net/http/server.go:1726 +0x44 github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.WithPanicRecovery.func1(0x9292920, 0xc434b51650, 0xc42e81e000) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/panics.go:75 +0x24a 
net/http.HandlerFunc.ServeHTTP(0xc42132d1d0, 0x7ff0a8661900, 0xc434c5db08, 0xc42e81e000) /usr/local/go/src/net/http/server.go:1726 +0x44 github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.(*timeoutHandler).ServeHTTP.func1(0xc4226e0720, 0x929ba20, 0xc434c5db08, 0xc42e81e000, 0xc438136ae0) /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/timeout.go:78 +0x8d created by github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters.(*timeoutHandler).ServeHTTP /go/src/github.com/openshift/origin/_output/local/go/src/github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/genericapiserver/filters/timeout.go:80 +0x1db [[Go-http-client/2.0] 172.18.7.222:50940] I0125 05:11:55.440351 4678 audit.go:125] 2017-01-25T05:11:55.440312339-05:00 AUDIT: id="1795c635-5371-4e44-ba55-051dd8b3e06b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-2-7n81h-cp7jp-user" as="" asgroups="" namespace="" uri="/oapi/v1/users/~" I0125 05:11:55.441400 4678 audit.go:45] 2017-01-25T05:11:55.441385094-05:00 AUDIT: id="1795c635-5371-4e44-ba55-051dd8b3e06b" response="200" I0125 05:11:55.441485 4678 panics.go:76] GET /oapi/v1/users/~: (2.729663ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.443741 4678 audit.go:125] 2017-01-25T05:11:55.443708219-05:00 AUDIT: id="978ac127-8cf6-40f6-8b83-a2cc6d6625e5" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-2-7n81h-cp7jp-user" as="" asgroups="" namespace="" uri="/oapi/v1/users/~" I0125 05:11:55.444659 4678 audit.go:45] 2017-01-25T05:11:55.444644531-05:00 AUDIT: id="978ac127-8cf6-40f6-8b83-a2cc6d6625e5" response="200" I0125 05:11:55.444727 4678 panics.go:76] GET /oapi/v1/users/~: (2.480346ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.450383 4678 audit.go:125] 2017-01-25T05:11:55.450339571-05:00 AUDIT: id="ebae420d-4819-47bc-8b1c-549fe8f13c48" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-2-7n81h-cp7jp-user" as="" asgroups="" namespace="" uri="/oapi/v1/projectrequests" I0125 05:11:55.451609 4678 audit.go:125] 2017-01-25T05:11:55.451578241-05:00 AUDIT: id="df87254a-38eb-4f8e-a43d-da04329b8f70" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/projects/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:11:55.452331 4678 audit.go:125] 2017-01-25T05:11:55.452299947-05:00 AUDIT: id="6d24d6ae-2dbb-4fc3-858d-330e6215810d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:11:55.453296 4678 audit.go:45] 2017-01-25T05:11:55.453282557-05:00 AUDIT: id="6d24d6ae-2dbb-4fc3-858d-330e6215810d" response="404" I0125 05:11:55.453359 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp: (1.31195ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.453664 4678 audit.go:45] 2017-01-25T05:11:55.453651837-05:00 AUDIT: id="df87254a-38eb-4f8e-a43d-da04329b8f70" response="404" I0125 05:11:55.453704 4678 panics.go:76] GET /oapi/v1/projects/extended-test-postgresql-replication-2-7n81h-cp7jp: (2.39991ms) 404 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 
172.18.7.222:50794] I0125 05:11:55.454785 4678 audit.go:125] 2017-01-25T05:11:55.454755512-05:00 AUDIT: id="3b5f7216-a4f1-469a-9454-10dd37088bd0" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/oapi/v1/namespaces/default/processedtemplates" I0125 05:11:55.456004 4678 audit.go:45] 2017-01-25T05:11:55.455986727-05:00 AUDIT: id="3b5f7216-a4f1-469a-9454-10dd37088bd0" response="201" I0125 05:11:55.456242 4678 panics.go:76] POST /oapi/v1/namespaces/default/processedtemplates: (1.731789ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.457495 4678 audit.go:125] 2017-01-25T05:11:55.457466127-05:00 AUDIT: id="b032053a-4659-43dc-900f-2608bc59fc4a" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/projects" I0125 05:11:55.458597 4678 audit.go:125] 2017-01-25T05:11:55.458568838-05:00 AUDIT: id="26992d93-cf28-4712-8b9b-a2027d507d31" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/namespaces" I0125 05:11:55.462151 4678 audit.go:125] 2017-01-25T05:11:55.462115615-05:00 AUDIT: id="6a892c36-e1ff-42c2-a142-22feff43bdc2" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts" I0125 05:11:55.464610 4678 audit.go:45] 2017-01-25T05:11:55.464595338-05:00 AUDIT: id="6a892c36-e1ff-42c2-a142-22feff43bdc2" response="201" I0125 05:11:55.464668 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts: (2.800149ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.467398 4678 audit.go:125] 2017-01-25T05:11:55.467363166-05:00 AUDIT: id="bfb80f46-cf99-495b-925f-e21875b80f48" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:11:55.467583 4678 audit.go:125] 2017-01-25T05:11:55.467554553-05:00 AUDIT: id="36af734d-f2a2-49e1-ae7d-6cc902c21319" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default" I0125 05:11:55.468374 4678 audit.go:125] 2017-01-25T05:11:55.468341131-05:00 AUDIT: id="849b5de6-55a3-4910-8d69-12386127a52a" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts" I0125 05:11:55.468437 4678 audit.go:125] 2017-01-25T05:11:55.468409633-05:00 AUDIT: id="4e394628-fa90-4f8b-bf75-dc4a897d326b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default" I0125 05:11:55.470387 4678 audit.go:45] 2017-01-25T05:11:55.47037134-05:00 AUDIT: id="849b5de6-55a3-4910-8d69-12386127a52a" response="201" I0125 05:11:55.470449 4678 panics.go:76] POST 
/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts: (2.352697ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.470633 4678 audit.go:45] 2017-01-25T05:11:55.470617639-05:00 AUDIT: id="36af734d-f2a2-49e1-ae7d-6cc902c21319" response="200" I0125 05:11:55.470679 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default: (3.344494ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.470883 4678 audit.go:45] 2017-01-25T05:11:55.470870293-05:00 AUDIT: id="4e394628-fa90-4f8b-bf75-dc4a897d326b" response="200" I0125 05:11:55.470935 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default: (2.769008ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.471924 4678 create_dockercfg_secrets.go:459] Creating token secret "default-token-xjvj7" for service account extended-test-postgresql-replication-2-7n81h-cp7jp/default I0125 05:11:55.472109 4678 tokens_controller.go:401] serviceaccount extended-test-postgresql-replication-2-7n81h-cp7jp/default is not up to date, skipping token creation I0125 05:11:55.473378 4678 audit.go:125] 2017-01-25T05:11:55.4733433-05:00 AUDIT: id="0a4dd3fc-8be7-41a8-ad67-599665a6bbc1" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts" I0125 05:11:55.474294 4678 audit.go:125] 2017-01-25T05:11:55.474255862-05:00 AUDIT: id="71d727bd-0e64-45a4-835a-6d37d0c04d45" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.474589 4678 audit.go:125] 2017-01-25T05:11:55.474554256-05:00 AUDIT: id="92f4154f-1a78-44a1-80dd-ef534c264805" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder" I0125 05:11:55.474804 4678 audit.go:125] 2017-01-25T05:11:55.474775733-05:00 AUDIT: id="d5cffc0c-b8df-4c58-95be-bcfe839afbb1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder" I0125 05:11:55.475095 4678 audit.go:125] 2017-01-25T05:11:55.475064844-05:00 AUDIT: id="4f7ca30a-6c4f-4b7d-a664-193c0d82b492" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default" I0125 05:11:55.476030 4678 audit.go:125] 2017-01-25T05:11:55.475999435-05:00 AUDIT: id="b064f669-fe22-4137-99f0-f48891099837" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/resourcequotas" I0125 05:11:55.477295 4678 audit.go:45] 2017-01-25T05:11:55.477280402-05:00 
AUDIT: id="4f7ca30a-6c4f-4b7d-a664-193c0d82b492" response="200" I0125 05:11:55.477351 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default: (2.500355ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.477497 4678 audit.go:45] 2017-01-25T05:11:55.477485055-05:00 AUDIT: id="d5cffc0c-b8df-4c58-95be-bcfe839afbb1" response="200" I0125 05:11:55.477539 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder: (2.976803ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.477659 4678 audit.go:45] 2017-01-25T05:11:55.477642793-05:00 AUDIT: id="b064f669-fe22-4137-99f0-f48891099837" response="200" I0125 05:11:55.477720 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/resourcequotas: (1.940996ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.479721 4678 audit.go:125] 2017-01-25T05:11:55.479687583-05:00 AUDIT: id="a153abff-c686-45e3-b4bf-a46515bbcb89" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:11:55.480362 4678 audit.go:45] 2017-01-25T05:11:55.480347595-05:00 AUDIT: id="26992d93-cf28-4712-8b9b-a2027d507d31" response="201" I0125 05:11:55.480433 4678 panics.go:76] POST /api/v1/namespaces: (22.116824ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.480809 4678 audit.go:125] 2017-01-25T05:11:55.480778787-05:00 AUDIT: id="f0434264-e3f2-40c3-b2b5-19e2e0521db5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:11:55.482341 4678 audit.go:45] 2017-01-25T05:11:55.482326456-05:00 AUDIT: id="71d727bd-0e64-45a4-835a-6d37d0c04d45" response="201" I0125 05:11:55.482410 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (8.398655ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.486879 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:11:55.487326 4678 audit.go:45] 2017-01-25T05:11:55.487310135-05:00 AUDIT: id="b032053a-4659-43dc-900f-2608bc59fc4a" response="201" I0125 05:11:55.487387 4678 panics.go:76] POST /oapi/v1/projects: (30.183167ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.487619 4678 create_dockercfg_secrets.go:459] Creating token secret "default-token-xjvj7" for service account extended-test-postgresql-replication-2-7n81h-cp7jp/default I0125 05:11:55.489937 4678 audit.go:45] 2017-01-25T05:11:55.489922567-05:00 AUDIT: id="f0434264-e3f2-40c3-b2b5-19e2e0521db5" response="200" I0125 05:11:55.489987 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (9.448411ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.490163 4678 audit.go:45] 2017-01-25T05:11:55.490152034-05:00 AUDIT: id="a153abff-c686-45e3-b4bf-a46515bbcb89" response="200" I0125 
05:11:55.490224 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (10.752043ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.490781 4678 create_dockercfg_secrets.go:459] Creating token secret "deployer-token-m3wb2" for service account extended-test-postgresql-replication-2-7n81h-cp7jp/deployer I0125 05:11:55.491024 4678 audit.go:45] 2017-01-25T05:11:55.491011715-05:00 AUDIT: id="bfb80f46-cf99-495b-925f-e21875b80f48" response="200" I0125 05:11:55.491072 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp: (23.969066ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.492940 4678 audit.go:45] 2017-01-25T05:11:55.492926529-05:00 AUDIT: id="0a4dd3fc-8be7-41a8-ad67-599665a6bbc1" response="201" I0125 05:11:55.492993 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts: (19.919236ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.493182 4678 audit.go:45] 2017-01-25T05:11:55.49316996-05:00 AUDIT: id="92f4154f-1a78-44a1-80dd-ef534c264805" response="200" I0125 05:11:55.493239 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder: (18.912711ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.495306 4678 audit.go:125] 2017-01-25T05:11:55.495264855-05:00 AUDIT: id="8f155c7b-9546-45ba-b647-646c74477c3a" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.496448 4678 audit.go:125] 2017-01-25T05:11:55.496415423-05:00 AUDIT: id="bce3bf5f-5390-4851-9c64-7c9de2ca7c32" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-token-xjvj7" I0125 05:11:55.497040 4678 audit.go:125] 2017-01-25T05:11:55.497011226-05:00 AUDIT: id="becf88fd-3d0a-49ea-a229-7e177ea99a40" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.497570 4678 audit.go:125] 2017-01-25T05:11:55.497540989-05:00 AUDIT: id="4a6a5ec6-bb3c-49ff-9070-cfc05f4d467c" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/rolebindings" I0125 05:11:55.498171 4678 audit.go:125] 2017-01-25T05:11:55.498141519-05:00 AUDIT: id="90dafe14-8b62-4953-8ca3-718ddfe717d3" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.498664 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (37.27177ms) I0125 05:11:55.498699 4678 serviceaccounts_controller.go:191] Finished syncing 
namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (8.019µs) I0125 05:11:55.498804 4678 create_dockercfg_secrets.go:459] Creating token secret "builder-token-m01kt" for service account extended-test-postgresql-replication-2-7n81h-cp7jp/builder I0125 05:11:55.499388 4678 audit.go:125] 2017-01-25T05:11:55.499357698-05:00 AUDIT: id="58cf8181-9364-4c62-b4c3-1f9bf235bad3" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.499760 4678 audit.go:45] 2017-01-25T05:11:55.499746517-05:00 AUDIT: id="bce3bf5f-5390-4851-9c64-7c9de2ca7c32" response="200" I0125 05:11:55.499811 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-token-xjvj7: (3.644514ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.500295 4678 audit.go:125] 2017-01-25T05:11:55.500263059-05:00 AUDIT: id="93bd7b80-c413-4a7b-9498-ead07b2005e2" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.500870 4678 audit.go:45] 2017-01-25T05:11:55.50085706-05:00 AUDIT: id="8f155c7b-9546-45ba-b647-646c74477c3a" response="409" I0125 05:11:55.500920 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (5.995188ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.501825 4678 audit.go:125] 2017-01-25T05:11:55.501792653-05:00 AUDIT: id="090cc109-b358-4cff-9be9-bfccececb4da" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.502720 4678 audit.go:45] 2017-01-25T05:11:55.502707151-05:00 AUDIT: id="becf88fd-3d0a-49ea-a229-7e177ea99a40" response="201" I0125 05:11:55.502769 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (5.987755ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.504984 4678 create_dockercfg_secrets.go:459] Creating token secret "deployer-token-m3wb2" for service account extended-test-postgresql-replication-2-7n81h-cp7jp/deployer I0125 05:11:55.504993 4678 audit.go:45] 2017-01-25T05:11:55.5049806-05:00 AUDIT: id="090cc109-b358-4cff-9be9-bfccececb4da" response="201" I0125 05:11:55.505038 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (3.472032ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.507346 4678 audit.go:45] 2017-01-25T05:11:55.507332487-05:00 AUDIT: id="58cf8181-9364-4c62-b4c3-1f9bf235bad3" response="201" I0125 05:11:55.507967 4678 audit.go:45] 2017-01-25T05:11:55.507952449-05:00 AUDIT: id="93bd7b80-c413-4a7b-9498-ead07b2005e2" response="201" I0125 05:11:55.508124 4678 audit.go:45] 2017-01-25T05:11:55.508113428-05:00 AUDIT: id="90dafe14-8b62-4953-8ca3-718ddfe717d3" response="201" I0125 05:11:55.508685 4678 audit.go:125] 2017-01-25T05:11:55.508652291-05:00 AUDIT: id="31a01982-e631-48b2-881a-184cf0420d14" ip="172.18.7.222" method="POST" 
user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.509382 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (10.250058ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.509514 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (9.498537ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.509568 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (11.657228ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.511360 4678 create_dockercfg_secrets.go:459] Creating token secret "builder-token-m01kt" for service account extended-test-postgresql-replication-2-7n81h-cp7jp/builder I0125 05:11:55.513950 4678 audit.go:125] 2017-01-25T05:11:55.513908701-05:00 AUDIT: id="02da30b0-4e9f-457f-8464-6e4d11599057" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder" I0125 05:11:55.514473 4678 audit.go:45] 2017-01-25T05:11:55.514458984-05:00 AUDIT: id="02da30b0-4e9f-457f-8464-6e4d11599057" response="409" I0125 05:11:55.514526 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder: (843.944µs) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.514867 4678 audit.go:125] 2017-01-25T05:11:55.514836913-05:00 AUDIT: id="0f030069-743c-4f35-8d34-1aed94de694c" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.515415 4678 audit.go:125] 2017-01-25T05:11:55.515384681-05:00 AUDIT: id="3f583f56-e003-4baa-a1c6-628e29963e6f" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-token-xjvj7" I0125 05:11:55.515949 4678 audit.go:125] 2017-01-25T05:11:55.515904542-05:00 AUDIT: id="a50e05cd-a742-4ed1-b466-7ccaef2c1fe3" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-m3wb2" I0125 05:11:55.516159 4678 audit.go:45] 2017-01-25T05:11:55.516146778-05:00 AUDIT: id="4a6a5ec6-bb3c-49ff-9070-cfc05f4d467c" response="201" I0125 05:11:55.516239 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/rolebindings: (18.923993ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.516578 4678 audit.go:125] 2017-01-25T05:11:55.516546424-05:00 AUDIT: id="5c6f100d-979c-48b3-ae72-57eee08b00bc" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default" I0125 05:11:55.516877 4678 audit.go:45] 2017-01-25T05:11:55.516862996-05:00 AUDIT: id="a50e05cd-a742-4ed1-b466-7ccaef2c1fe3" response="200" I0125 05:11:55.516923 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-m3wb2: (1.240932ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.517109 4678 audit.go:125] 2017-01-25T05:11:55.517078753-05:00 AUDIT: id="6e45f204-5f53-4fe9-8906-fcd0eee71204" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-m01kt" I0125 05:11:55.517446 4678 audit.go:125] 2017-01-25T05:11:55.517414586-05:00 AUDIT: id="1e887577-ce4f-4f01-95d1-b7501416e497" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:11:55.518218 4678 audit.go:45] 2017-01-25T05:11:55.518190266-05:00 AUDIT: id="1e887577-ce4f-4f01-95d1-b7501416e497" response="409" I0125 05:11:55.518262 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (1.082563ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.519364 4678 audit.go:45] 2017-01-25T05:11:55.519350121-05:00 AUDIT: id="6e45f204-5f53-4fe9-8906-fcd0eee71204" response="200" I0125 05:11:55.519428 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-m01kt: (2.572606ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.521509 4678 audit.go:45] 2017-01-25T05:11:55.521495126-05:00 AUDIT: id="3f583f56-e003-4baa-a1c6-628e29963e6f" response="200" I0125 05:11:55.522410 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-token-xjvj7: (7.257794ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.522608 4678 audit.go:45] 2017-01-25T05:11:55.522594871-05:00 AUDIT: id="5c6f100d-979c-48b3-ae72-57eee08b00bc" response="200" I0125 05:11:55.522649 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default: (6.329563ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.524363 4678 audit.go:45] 2017-01-25T05:11:55.524348741-05:00 AUDIT: id="0f030069-743c-4f35-8d34-1aed94de694c" response="409" I0125 05:11:55.524406 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (9.78852ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.524863 4678 create_dockercfg_secrets.go:496] Creating dockercfg secret "default-dockercfg-9lh9b" for service account extended-test-postgresql-replication-2-7n81h-cp7jp/default I0125 05:11:55.525778 4678 audit.go:45] 2017-01-25T05:11:55.525727899-05:00 AUDIT: id="31a01982-e631-48b2-881a-184cf0420d14" response="409" I0125 05:11:55.525824 4678 panics.go:76] POST 
/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (17.424029ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.526486 4678 tokens_controller.go:448] deleting secret extended-test-postgresql-replication-2-7n81h-cp7jp/builder-token-qsxqs because reference couldn't be added (Operation cannot be fulfilled on serviceaccounts "builder": the object has been modified; please apply your changes to the latest version and try again) I0125 05:11:55.527817 4678 audit.go:125] 2017-01-25T05:11:55.527780964-05:00 AUDIT: id="c1a00ddb-002c-478d-9a26-9ad6dd4fa0f8" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.529122 4678 audit.go:125] 2017-01-25T05:11:55.529089684-05:00 AUDIT: id="9ce612b1-e9ee-4ce3-b48e-33636bfadcc4" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-m01kt" I0125 05:11:55.530368 4678 audit.go:125] 2017-01-25T05:11:55.530334816-05:00 AUDIT: id="07e65e58-fd4b-4fc0-81ba-3fef1e4be94b" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-qsxqs" I0125 05:11:55.531042 4678 audit.go:125] 2017-01-25T05:11:55.53101016-05:00 AUDIT: id="15ab103b-51dc-4eb1-86f6-eb80f16f7f1a" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/rolebindings" I0125 05:11:55.533070 4678 tokens_controller.go:448] deleting secret extended-test-postgresql-replication-2-7n81h-cp7jp/deployer-token-5gpzq because reference couldn't be added (Operation cannot be fulfilled on serviceaccounts "deployer": the object has been modified; please apply your changes to the latest version and try again) I0125 05:11:55.533817 4678 audit.go:125] 2017-01-25T05:11:55.533783798-05:00 AUDIT: id="6c1cb6f8-d628-49ce-8cfa-e54e18394c83" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-m3wb2" I0125 05:11:55.534859 4678 audit.go:125] 2017-01-25T05:11:55.534824897-05:00 AUDIT: id="b715bfc5-6506-4a16-b55c-a9c4a5231870" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-5gpzq" I0125 05:11:55.535523 4678 audit.go:45] 2017-01-25T05:11:55.535508755-05:00 AUDIT: id="9ce612b1-e9ee-4ce3-b48e-33636bfadcc4" response="200" I0125 05:11:55.535843 4678 audit.go:45] 2017-01-25T05:11:55.535830381-05:00 AUDIT: id="c1a00ddb-002c-478d-9a26-9ad6dd4fa0f8" response="201" I0125 05:11:55.536411 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-m01kt: (7.557599ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 
172.18.7.222:50794] I0125 05:11:55.536483 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (8.962781ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.538104 4678 audit.go:45] 2017-01-25T05:11:55.538090617-05:00 AUDIT: id="15ab103b-51dc-4eb1-86f6-eb80f16f7f1a" response="201" I0125 05:11:55.538168 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/rolebindings: (7.390776ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.538352 4678 audit.go:45] 2017-01-25T05:11:55.538338344-05:00 AUDIT: id="6c1cb6f8-d628-49ce-8cfa-e54e18394c83" response="200" I0125 05:11:55.539085 4678 audit.go:125] 2017-01-25T05:11:55.539052543-05:00 AUDIT: id="981e49c2-df6b-4813-9931-66437329da2f" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default" I0125 05:11:55.539504 4678 create_dockercfg_secrets.go:496] Creating dockercfg secret "builder-dockercfg-mt0j2" for service account extended-test-postgresql-replication-2-7n81h-cp7jp/builder I0125 05:11:55.539862 4678 audit.go:45] 2017-01-25T05:11:55.539849768-05:00 AUDIT: id="981e49c2-df6b-4813-9931-66437329da2f" response="409" I0125 05:11:55.539907 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default: (1.08728ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.540300 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-m3wb2: (6.749212ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.540892 4678 audit.go:125] 2017-01-25T05:11:55.540860667-05:00 AUDIT: id="b2f6f084-7f8c-4915-be0a-ce11c08ec85f" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.543123 4678 create_dockercfg_secrets.go:496] Creating dockercfg secret "deployer-dockercfg-vxkfr" for service account extended-test-postgresql-replication-2-7n81h-cp7jp/deployer I0125 05:11:55.543807 4678 audit.go:125] 2017-01-25T05:11:55.543773063-05:00 AUDIT: id="eca6885a-c2ef-4bc5-9ad2-cb75a41b72c0" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/rolebindings" I0125 05:11:55.544172 4678 audit.go:45] 2017-01-25T05:11:55.544158034-05:00 AUDIT: id="07e65e58-fd4b-4fc0-81ba-3fef1e4be94b" response="200" I0125 05:11:55.544232 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-qsxqs: (14.16426ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.544331 4678 audit.go:45] 2017-01-25T05:11:55.544320686-05:00 AUDIT: id="b2f6f084-7f8c-4915-be0a-ce11c08ec85f" response="201" I0125 05:11:55.545806 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (5.16848ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) 
kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.547044 4678 audit.go:45] 2017-01-25T05:11:55.547030319-05:00 AUDIT: id="b715bfc5-6506-4a16-b55c-a9c4a5231870" response="200" I0125 05:11:55.547094 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-5gpzq: (12.501713ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.549034 4678 audit.go:45] 2017-01-25T05:11:55.549017514-05:00 AUDIT: id="eca6885a-c2ef-4bc5-9ad2-cb75a41b72c0" response="201" I0125 05:11:55.549091 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/rolebindings: (5.556418ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.549777 4678 audit.go:125] 2017-01-25T05:11:55.549742173-05:00 AUDIT: id="b09e2de6-10f2-4b31-8b76-37a8131d7340" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder" I0125 05:11:55.550432 4678 audit.go:125] 2017-01-25T05:11:55.550401273-05:00 AUDIT: id="664fbb66-9b06-4f5b-b52f-277639a45318" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:11:55.550796 4678 audit.go:125] 2017-01-25T05:11:55.550765869-05:00 AUDIT: id="62f3063f-260b-48f9-945a-96b63a52f2ed" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder" I0125 05:11:55.551039 4678 audit.go:125] 2017-01-25T05:11:55.551008871-05:00 AUDIT: id="ff81dbd0-b4a7-4cc6-a649-d4a86433de21" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:11:55.551392 4678 audit.go:125] 2017-01-25T05:11:55.551358341-05:00 AUDIT: id="c038b7e9-e500-459d-998d-9e27f0a635a8" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:11:55.551627 4678 audit.go:125] 2017-01-25T05:11:55.551583577-05:00 AUDIT: id="08d0af72-80cf-4b88-9dd6-ef7f558a45a7" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/rolebindings" I0125 05:11:55.552053 4678 audit.go:125] 2017-01-25T05:11:55.552023345-05:00 AUDIT: id="226173fd-068a-46ae-b7f3-07b9dd63e8c4" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder" I0125 05:11:55.552651 4678 audit.go:125] 2017-01-25T05:11:55.552617174-05:00 AUDIT: id="e969fd63-9f0a-4a24-8b82-dd517b91f52f" ip="172.18.7.222" method="POST" 
user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.555303 4678 audit.go:45] 2017-01-25T05:11:55.555289247-05:00 AUDIT: id="b09e2de6-10f2-4b31-8b76-37a8131d7340" response="200" I0125 05:11:55.555356 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder: (5.882268ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.555524 4678 audit.go:45] 2017-01-25T05:11:55.555512864-05:00 AUDIT: id="226173fd-068a-46ae-b7f3-07b9dd63e8c4" response="200" I0125 05:11:55.555564 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder: (3.7531ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.555715 4678 audit.go:45] 2017-01-25T05:11:55.555704556-05:00 AUDIT: id="664fbb66-9b06-4f5b-b52f-277639a45318" response="200" I0125 05:11:55.555752 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (5.586279ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.555900 4678 audit.go:45] 2017-01-25T05:11:55.555889335-05:00 AUDIT: id="62f3063f-260b-48f9-945a-96b63a52f2ed" response="200" I0125 05:11:55.555936 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder: (5.395333ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.556063 4678 audit.go:45] 2017-01-25T05:11:55.556051865-05:00 AUDIT: id="ff81dbd0-b4a7-4cc6-a649-d4a86433de21" response="200" I0125 05:11:55.556098 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (5.313757ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.563871 4678 tokens_controller.go:401] serviceaccount extended-test-postgresql-replication-2-7n81h-cp7jp/builder is not up to date, skipping token creation I0125 05:11:55.564514 4678 audit.go:45] 2017-01-25T05:11:55.564493829-05:00 AUDIT: id="c038b7e9-e500-459d-998d-9e27f0a635a8" response="200" I0125 05:11:55.565041 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (13.911287ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.565463 4678 audit.go:45] 2017-01-25T05:11:55.565449018-05:00 AUDIT: id="e969fd63-9f0a-4a24-8b82-dd517b91f52f" response="201" I0125 05:11:55.565722 4678 audit.go:125] 2017-01-25T05:11:55.565687957-05:00 AUDIT: id="7f3f9f67-a9d1-41e9-9c67-2cd339589afc" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.566885 4678 audit.go:125] 2017-01-25T05:11:55.56685301-05:00 AUDIT: id="6851ad98-1cfd-4cf9-8c7a-b552e7bd8c78" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder" I0125 05:11:55.567520 4678 
audit.go:125] 2017-01-25T05:11:55.56748966-05:00 AUDIT: id="cea92250-4372-4bf9-840a-4f1f20adbe36" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default" I0125 05:11:55.568194 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (15.805923ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.568579 4678 audit.go:125] 2017-01-25T05:11:55.568546563-05:00 AUDIT: id="2e2f6a08-a848-4d00-9675-234c3f28d979" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:11:55.569557 4678 audit.go:125] 2017-01-25T05:11:55.569518667-05:00 AUDIT: id="2f69a3f9-ae88-4242-ac2d-f08ffe9a39c1" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:11:55.570468 4678 audit.go:45] 2017-01-25T05:11:55.570455225-05:00 AUDIT: id="7f3f9f67-a9d1-41e9-9c67-2cd339589afc" response="201" I0125 05:11:55.571467 4678 audit.go:45] 2017-01-25T05:11:55.571453775-05:00 AUDIT: id="6851ad98-1cfd-4cf9-8c7a-b552e7bd8c78" response="200" I0125 05:11:55.571517 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder: (4.894419ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.571575 4678 audit.go:45] 2017-01-25T05:11:55.57156352-05:00 AUDIT: id="08d0af72-80cf-4b88-9dd6-ef7f558a45a7" response="201" I0125 05:11:55.571629 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/rolebindings: (20.263348ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.571667 4678 audit.go:45] 2017-01-25T05:11:55.571657278-05:00 AUDIT: id="cea92250-4372-4bf9-840a-4f1f20adbe36" response="200" I0125 05:11:55.571707 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default: (4.450646ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.571760 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (6.33523ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.573472 4678 audit.go:125] 2017-01-25T05:11:55.573436473-05:00 AUDIT: id="1e5ac655-9074-4b5c-ac51-359c68c32844" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:11:55.574222 4678 audit.go:45] 2017-01-25T05:11:55.574191538-05:00 AUDIT: id="2f69a3f9-ae88-4242-ac2d-f08ffe9a39c1" response="200" I0125 05:11:55.574275 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (4.97621ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 
172.18.7.222:50794] I0125 05:11:55.574692 4678 audit.go:45] 2017-01-25T05:11:55.574679276-05:00 AUDIT: id="2e2f6a08-a848-4d00-9675-234c3f28d979" response="200" I0125 05:11:55.575077 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (6.752498ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.575603 4678 store.go:283] GuaranteedUpdate of kubernetes.io/serviceaccounts/extended-test-postgresql-replication-2-7n81h-cp7jp/deployer failed because of a conflict, going to retry I0125 05:11:55.575724 4678 audit.go:45] 2017-01-25T05:11:55.575712756-05:00 AUDIT: id="1e5ac655-9074-4b5c-ac51-359c68c32844" response="409" I0125 05:11:55.575766 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (2.577839ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.576172 4678 audit.go:125] 2017-01-25T05:11:55.576140934-05:00 AUDIT: id="d9ef60a2-b71d-47c0-bc43-967a3d3661da" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/projects/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:11:55.576825 4678 tokens_controller.go:448] deleting secret extended-test-postgresql-replication-2-7n81h-cp7jp/deployer-token-h40x2 because reference couldn't be added (Operation cannot be fulfilled on serviceaccounts "deployer": the object has been modified; please apply your changes to the latest version and try again) I0125 05:11:55.578369 4678 audit.go:125] 2017-01-25T05:11:55.57833482-05:00 AUDIT: id="d6c37f12-6129-4700-8093-5eab533f2ab8" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:11:55.579022 4678 audit.go:125] 2017-01-25T05:11:55.578990014-05:00 AUDIT: id="f8615bf1-4ee0-45b6-af8b-5a5640b79e64" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-h40x2" I0125 05:11:55.580493 4678 audit.go:45] 2017-01-25T05:11:55.580479029-05:00 AUDIT: id="d6c37f12-6129-4700-8093-5eab533f2ab8" response="200" I0125 05:11:55.580549 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp: (3.023185ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.581185 4678 audit.go:45] 2017-01-25T05:11:55.581171548-05:00 AUDIT: id="d9ef60a2-b71d-47c0-bc43-967a3d3661da" response="200" I0125 05:11:55.581263 4678 panics.go:76] GET /oapi/v1/projects/extended-test-postgresql-replication-2-7n81h-cp7jp: (5.35308ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:55.581520 4678 audit.go:45] 2017-01-25T05:11:55.581508081-05:00 AUDIT: id="ebae420d-4819-47bc-8b1c-549fe8f13c48" response="201" I0125 05:11:55.581613 4678 panics.go:76] POST /oapi/v1/projectrequests: (132.945973ms) 201 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.582210 4678 audit.go:125] 2017-01-25T05:11:55.582163851-05:00 AUDIT: id="a13f5c4f-0023-4179-901f-8fb615a96cb5" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" 
asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.582538 4678 audit.go:45] 2017-01-25T05:11:55.582524615-05:00 AUDIT: id="f8615bf1-4ee0-45b6-af8b-5a5640b79e64" response="200" I0125 05:11:55.582581 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-h40x2: (3.824465ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.584396 4678 audit.go:45] 2017-01-25T05:11:55.58438059-05:00 AUDIT: id="a13f5c4f-0023-4179-901f-8fb615a96cb5" response="201" I0125 05:11:55.585267 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (3.32736ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.585724 4678 audit.go:125] 2017-01-25T05:11:55.585688892-05:00 AUDIT: id="c89d755e-6149-40a1-9e33-a47c62a494f7" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:11:55.586558 4678 audit.go:125] 2017-01-25T05:11:55.586520018-05:00 AUDIT: id="fe33c6f4-3b17-4b6f-8285-5553006ba1e1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:11:55.588305 4678 audit.go:125] 2017-01-25T05:11:55.588274392-05:00 AUDIT: id="878fda6f-873e-4a79-86e3-fda6613d4999" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:11:55.588779 4678 audit.go:45] 2017-01-25T05:11:55.588765531-05:00 AUDIT: id="c89d755e-6149-40a1-9e33-a47c62a494f7" response="200" I0125 05:11:55.588927 4678 audit.go:125] 2017-01-25T05:11:55.588896406-05:00 AUDIT: id="c7569283-c638-440a-9a84-2dbe25a9cef8" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder" I0125 05:11:55.589088 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (3.615887ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.590150 4678 audit.go:45] 2017-01-25T05:11:55.590137282-05:00 AUDIT: id="878fda6f-873e-4a79-86e3-fda6613d4999" response="200" I0125 05:11:55.590213 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (2.114015ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.590543 4678 audit.go:45] 2017-01-25T05:11:55.590530023-05:00 AUDIT: id="fe33c6f4-3b17-4b6f-8285-5553006ba1e1" response="200" I0125 05:11:55.590586 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (6.609206ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) 
kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.591515 4678 audit.go:45] 2017-01-25T05:11:55.591498157-05:00 AUDIT: id="c7569283-c638-440a-9a84-2dbe25a9cef8" response="200" I0125 05:11:55.591565 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder: (2.897892ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.592926 4678 audit.go:125] 2017-01-25T05:11:55.592890549-05:00 AUDIT: id="57dc3eef-a805-45eb-aea9-e63545b30c3d" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-2-7n81h-cp7jp-user" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/pods" I0125 05:11:55.593802 4678 audit.go:45] 2017-01-25T05:11:55.593789255-05:00 AUDIT: id="57dc3eef-a805-45eb-aea9-e63545b30c3d" response="200" I0125 05:11:55.593874 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/pods: (4.234577ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:11:55.596546 4678 audit.go:125] 2017-01-25T05:11:55.596515612-05:00 AUDIT: id="24601dc7-8051-4750-902a-25cea7095995" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:11:55.597618 4678 audit.go:125] 2017-01-25T05:11:55.597581586-05:00 AUDIT: id="2b718479-d12a-4af2-b53d-8c1a4c21741e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts?fieldSelector=metadata.name%3Ddefault" I0125 05:11:55.597998 4678 audit.go:45] 2017-01-25T05:11:55.597984812-05:00 AUDIT: id="2b718479-d12a-4af2-b53d-8c1a4c21741e" response="200" I0125 05:11:55.599441 4678 audit.go:45] 2017-01-25T05:11:55.599428295-05:00 AUDIT: id="24601dc7-8051-4750-902a-25cea7095995" response="201" I0125 05:11:55.600423 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (4.12171ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.600907 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts?fieldSelector=metadata.name%3Ddefault: (3.525731ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:11:55.602020 4678 audit.go:125] 2017-01-25T05:11:55.601987967-05:00 AUDIT: id="c1d57b04-ddf4-4851-b220-7f049b55e779" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:11:55.603273 4678 audit.go:125] 2017-01-25T05:11:55.603247006-05:00 AUDIT: id="9e3b5909-8c5a-4227-9960-905b76dce0c4" ip="172.18.7.222" method="POST" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes" I0125 05:11:55.605548 4678 audit.go:45] 2017-01-25T05:11:55.605531443-05:00 AUDIT: id="c1d57b04-ddf4-4851-b220-7f049b55e779" response="200" I0125 05:11:55.605644 4678 panics.go:76] PUT 
/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (3.867271ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:55.607321 4678 audit.go:45] 2017-01-25T05:11:55.60730655-05:00 AUDIT: id="9e3b5909-8c5a-4227-9960-905b76dce0c4" response="201" I0125 05:11:55.607440 4678 panics.go:76] POST /api/v1/persistentvolumes: (4.426362ms) 201 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:11:55.608586 4678 pv_controller_base.go:579] storeObjectUpdate: adding volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", version 10933 I0125 05:11:55.608614 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Pending, bound to: "", boundByController: false I0125 05:11:55.608627 4678 pv_controller.go:384] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is unused I0125 05:11:55.608634 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Available I0125 05:11:55.611161 4678 audit.go:125] 2017-01-25T05:11:55.611123323-05:00 AUDIT: id="1d3233a7-a5a0-4f77-8b40-5edb8b0b7511" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:pv-binder-controller" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000/status" I0125 05:11:55.613352 4678 audit.go:45] 2017-01-25T05:11:55.61333654-05:00 AUDIT: id="1d3233a7-a5a0-4f77-8b40-5edb8b0b7511" response="200" I0125 05:11:55.613432 4678 panics.go:76] PUT /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000/status: (4.231477ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:pv-binder-controller] 172.18.7.222:50846] I0125 05:11:55.613959 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10934 I0125 05:11:55.613985 4678 pv_controller.go:672] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" entered phase "Available" I0125 05:11:55.614014 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10934 I0125 05:11:55.614028 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Available, bound to: "", boundByController: false I0125 05:11:55.614041 4678 pv_controller.go:384] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is unused I0125 05:11:55.614051 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Available I0125 05:11:55.614059 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Available already set I0125 05:11:55.684618 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:11:55.714729 4678 audit.go:125] 2017-01-25T05:11:55.714678366-05:00 AUDIT: id="3a93f52e-b4a5-45f4-9d46-4ab93d592186" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/localsubjectaccessreviews" I0125 05:11:55.715637 4678 audit.go:45] 2017-01-25T05:11:55.715622468-05:00 AUDIT: id="3a93f52e-b4a5-45f4-9d46-4ab93d592186" response="201" I0125 05:11:55.715729 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/localsubjectaccessreviews: (2.79396ms) 201 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.722149 4678 audit.go:125] 2017-01-25T05:11:55.722106738-05:00 AUDIT: id="908de6aa-8a84-4ac8-8d31-9978f0825a1f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/ruby" I0125 05:11:55.723400 4678 audit.go:45] 2017-01-25T05:11:55.723384786-05:00 AUDIT: id="908de6aa-8a84-4ac8-8d31-9978f0825a1f" response="200" I0125 05:11:55.723755 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/ruby: (3.282365ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.728297 4678 audit.go:125] 2017-01-25T05:11:55.72825693-05:00 AUDIT: id="89092981-e873-4e17-8281-28e2709e0885" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/nodejs" I0125 05:11:55.729493 4678 audit.go:45] 2017-01-25T05:11:55.729476456-05:00 AUDIT: id="89092981-e873-4e17-8281-28e2709e0885" response="200" I0125 05:11:55.729793 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/nodejs: (3.089263ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.733465 4678 audit.go:125] 2017-01-25T05:11:55.733432457-05:00 AUDIT: id="e22e0585-7c1f-440f-af2c-221613197348" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/perl" I0125 05:11:55.734620 4678 audit.go:45] 2017-01-25T05:11:55.734605349-05:00 AUDIT: id="e22e0585-7c1f-440f-af2c-221613197348" response="200" I0125 05:11:55.735065 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/perl: (2.987299ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.738368 4678 audit.go:125] 2017-01-25T05:11:55.738335546-05:00 AUDIT: id="7d53ac6d-33fb-4d9f-94b0-d625029da7cf" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/php" I0125 05:11:55.739599 4678 audit.go:45] 2017-01-25T05:11:55.739583657-05:00 AUDIT: id="7d53ac6d-33fb-4d9f-94b0-d625029da7cf" response="200" I0125 05:11:55.739919 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/php: (2.84288ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.743588 4678 audit.go:125] 2017-01-25T05:11:55.743552497-05:00 AUDIT: id="f86b8b44-8f55-4c63-8cd6-be216309dcce" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/python" I0125 05:11:55.744871 4678 audit.go:45] 2017-01-25T05:11:55.744856085-05:00 AUDIT: id="f86b8b44-8f55-4c63-8cd6-be216309dcce" response="200" I0125 05:11:55.745369 4678 panics.go:76] GET 
/oapi/v1/namespaces/openshift/imagestreams/python: (3.325603ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.749378 4678 audit.go:125] 2017-01-25T05:11:55.749343371-05:00 AUDIT: id="b604cd7f-ad02-49e3-8cfb-46ddaa20d75c" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/wildfly" I0125 05:11:55.750626 4678 audit.go:45] 2017-01-25T05:11:55.750610899-05:00 AUDIT: id="b604cd7f-ad02-49e3-8cfb-46ddaa20d75c" response="200" I0125 05:11:55.751135 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/wildfly: (3.223628ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.754629 4678 audit.go:125] 2017-01-25T05:11:55.754595007-05:00 AUDIT: id="1ce44681-e62b-4811-86b1-3a1dbc919d29" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/mysql" I0125 05:11:55.755853 4678 audit.go:45] 2017-01-25T05:11:55.755842168-05:00 AUDIT: id="1ce44681-e62b-4811-86b1-3a1dbc919d29" response="200" I0125 05:11:55.756094 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/mysql: (2.958469ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.759273 4678 audit.go:125] 2017-01-25T05:11:55.759248072-05:00 AUDIT: id="010694ae-813f-42f4-90d7-f6158e67d4fe" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/postgresql" I0125 05:11:55.760276 4678 audit.go:45] 2017-01-25T05:11:55.760266561-05:00 AUDIT: id="010694ae-813f-42f4-90d7-f6158e67d4fe" response="200" I0125 05:11:55.760499 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/postgresql: (2.580908ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.763513 4678 audit.go:125] 2017-01-25T05:11:55.763486803-05:00 AUDIT: id="3ba08d82-1bde-4d3b-b400-0f05f6c430eb" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/mongodb" I0125 05:11:55.764489 4678 audit.go:45] 2017-01-25T05:11:55.764479531-05:00 AUDIT: id="3ba08d82-1bde-4d3b-b400-0f05f6c430eb" response="200" I0125 05:11:55.764708 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/mongodb: (2.547007ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.767896 4678 audit.go:125] 2017-01-25T05:11:55.767871445-05:00 AUDIT: id="12ec33fb-0414-45f2-aaff-c239829531ed" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/jenkins" I0125 05:11:55.769000 4678 audit.go:45] 2017-01-25T05:11:55.768990248-05:00 AUDIT: id="12ec33fb-0414-45f2-aaff-c239829531ed" response="200" I0125 05:11:55.769214 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/jenkins: (2.716398ms) 200 [[extended.test/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50942] I0125 05:11:55.782421 4678 generic.go:342] PLEG: Write status for cakephp-mysql-example-1-hook-pre/extended-test-cakephp-mysql-repo-test-4jx76-x2n7w: 
&container.PodStatus{ID:"99f51a19-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"cakephp-mysql-example-1-hook-pre", Namespace:"extended-test-cakephp-mysql-repo-test-4jx76-x2n7w", IP:"", ContainerStatuses:[]*container.ContainerStatus(nil), SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: Error response from daemon: {"message":"devmapper: Unknown device 990b6e073a1060f629a8b95dfb3d448eecf1949c3710d4b9276dac21ab7e9862"}) E0125 05:11:55.782489 4678 generic.go:238] PLEG: Ignoring events for pod cakephp-mysql-example-1-hook-pre/extended-test-cakephp-mysql-repo-test-4jx76-x2n7w: Error response from daemon: {"message":"devmapper: Unknown device 990b6e073a1060f629a8b95dfb3d448eecf1949c3710d4b9276dac21ab7e9862"} I0125 05:11:55.782510 4678 generic.go:333] PLEG: Delete status for pod "6cfb6d02-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:11:55.784686 4678 container_gc.go:249] Removing container "1b1019c4dbb189263f336af5eb9e4774791c819a70507030c714a837d9c04a3a" name "POD" I0125 05:11:55.785563 4678 kubelet.go:1820] SyncLoop (PLEG): ignore irrelevant event: &pleg.PodLifecycleEvent{ID:"6cfb6d02-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"ce2d6cc94bf98b5b7784a2c2b9d257512ef4ef0ec7029821c05a57dfa14d2675"} W0125 05:11:55.785595 4678 pod_container_deletor.go:77] Container "ce2d6cc94bf98b5b7784a2c2b9d257512ef4ef0ec7029821c05a57dfa14d2675" not found in pod's containers I0125 05:11:56.134753 4678 audit.go:125] 2017-01-25T05:11:56.134705148-05:00 AUDIT: id="94f044a4-ea3b-4fa4-aed5-9e5c1ada205b" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/processedtemplates" I0125 05:11:56.148419 4678 audit.go:45] 2017-01-25T05:11:56.148393036-05:00 AUDIT: id="94f044a4-ea3b-4fa4-aed5-9e5c1ada205b" response="201" I0125 05:11:56.149229 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/processedtemplates: (16.187109ms) 201 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:40450] I0125 05:11:56.157613 4678 audit.go:125] 2017-01-25T05:11:56.157564193-05:00 AUDIT: id="728d0a6c-2242-4927-baa4-b418d0954c80" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims" I0125 05:11:56.158834 4678 audit.go:125] 2017-01-25T05:11:56.158797425-05:00 AUDIT: id="665ee326-9b74-4117-a23f-36818df92ef0" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/limitranges" I0125 05:11:56.159889 4678 audit.go:45] 2017-01-25T05:11:56.159874386-05:00 AUDIT: id="665ee326-9b74-4117-a23f-36818df92ef0" response="200" I0125 05:11:56.159971 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/limitranges: (1.436937ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.160290 4678 admission.go:124] no storage class for claim postgresql-data-claim (generate: ) I0125 05:11:56.161864 4678 audit.go:45] 2017-01-25T05:11:56.16184981-05:00 AUDIT: id="728d0a6c-2242-4927-baa4-b418d0954c80" response="201" I0125 05:11:56.161970 4678 panics.go:76] POST 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims: (6.098346ms) 201 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40450] I0125 05:11:56.162556 4678 pv_controller_base.go:579] storeObjectUpdate: adding claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim", version 10935 I0125 05:11:56.162578 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Pending, bound to: "", bindCompleted: false, boundByController: false I0125 05:11:56.162628 4678 pv_controller.go:234] synchronizing unbound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Available, bound to: "", boundByController: false I0125 05:11:56.162637 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:11:56.162649 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:11:56.162746 4678 pv_controller.go:746] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" bound to volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:11:56.165342 4678 audit.go:125] 2017-01-25T05:11:56.165304549-05:00 AUDIT: id="8a1aba92-69d3-47ba-b7df-f017a941b95f" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:pv-binder-controller" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:11:56.170770 4678 audit.go:45] 2017-01-25T05:11:56.170747541-05:00 AUDIT: id="8a1aba92-69d3-47ba-b7df-f017a941b95f" response="200" I0125 05:11:56.170854 4678 panics.go:76] PUT /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (7.388956ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:pv-binder-controller] 172.18.7.222:50846] I0125 05:11:56.172406 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10936 I0125 05:11:56.172440 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Available, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:11:56.172452 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:11:56.172466 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Pending, bound to: "", bindCompleted: false, boundByController: false I0125 05:11:56.172477 4678 pv_controller.go:458] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume not bound yet, waiting for syncClaim to fix it I0125 05:11:56.172696 
4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10936 I0125 05:11:56.172717 4678 pv_controller.go:757] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:11:56.172727 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:11:56.175258 4678 audit.go:125] 2017-01-25T05:11:56.175214795-05:00 AUDIT: id="bc65b15a-d202-4a64-8db2-5fa0a442e39d" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services" I0125 05:11:56.175966 4678 rest.go:568] Service type: ClusterIP does not need health check node port I0125 05:11:56.177605 4678 audit.go:125] 2017-01-25T05:11:56.177568566-05:00 AUDIT: id="897818f3-71e0-4e2c-84b4-4e644ae192e4" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:pv-binder-controller" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000/status" I0125 05:11:56.178784 4678 audit.go:45] 2017-01-25T05:11:56.178767602-05:00 AUDIT: id="bc65b15a-d202-4a64-8db2-5fa0a442e39d" response="201" I0125 05:11:56.178897 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services: (7.431322ms) 201 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40450] I0125 05:11:56.180010 4678 secret_creating_controller.go:98] Adding service postgresql-master I0125 05:11:56.180389 4678 config.go:208] Calling handler.OnServiceUpdate() I0125 05:11:56.180406 4678 proxier.go:381] Received update notice: [] I0125 05:11:56.180456 4678 proxier.go:804] Syncing iptables rules I0125 05:11:56.180467 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:11:56.209308 4678 audit.go:45] 2017-01-25T05:11:56.209261934-05:00 AUDIT: id="897818f3-71e0-4e2c-84b4-4e644ae192e4" response="200" I0125 05:11:56.209626 4678 panics.go:76] PUT /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000/status: (36.339943ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:pv-binder-controller] 172.18.7.222:50846] I0125 05:11:56.211649 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:11:56.211695 4678 pv_controller.go:672] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" entered phase "Bound" I0125 05:11:56.211711 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:11:56.211788 4678 pv_controller.go:808] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:11:56.212126 4678 audit.go:125] 2017-01-25T05:11:56.212056632-05:00 AUDIT: id="bcfeff8d-78ad-455d-8254-7b310182e26c" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" 
namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:11:56.212854 4678 audit.go:125] 2017-01-25T05:11:56.212807687-05:00 AUDIT: id="5a05ac01-84f7-4dd9-a8e4-d5c10c4de7df" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services" I0125 05:11:56.213902 4678 rest.go:568] Service type: ClusterIP does not need health check node port I0125 05:11:56.216558 4678 audit.go:125] 2017-01-25T05:11:56.216513719-05:00 AUDIT: id="4532b7a6-7dd9-4ea0-964d-37cda14da97e" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:pv-binder-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim" I0125 05:11:56.216891 4678 audit.go:45] 2017-01-25T05:11:56.216878141-05:00 AUDIT: id="bcfeff8d-78ad-455d-8254-7b310182e26c" response="404" I0125 05:11:56.216999 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (13.905231ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:11:56.217146 4678 audit.go:45] 2017-01-25T05:11:56.217134329-05:00 AUDIT: id="5a05ac01-84f7-4dd9-a8e4-d5c10c4de7df" response="201" I0125 05:11:56.218129 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services: (15.390929ms) 201 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40450] I0125 05:11:56.220881 4678 secret_creating_controller.go:98] Adding service postgresql-slave I0125 05:11:56.221110 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:11:56.221173 4678 audit.go:45] 2017-01-25T05:11:56.221158102-05:00 AUDIT: id="4532b7a6-7dd9-4ea0-964d-37cda14da97e" response="200" I0125 05:11:56.222362 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim: (9.327791ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:pv-binder-controller] 172.18.7.222:50846] I0125 05:11:56.222721 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10940 I0125 05:11:56.222748 4678 pv_controller.go:819] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:11:56.222758 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:11:56.232645 4678 audit.go:125] 2017-01-25T05:11:56.232591378-05:00 AUDIT: id="b3ced00a-9d5c-4123-8618-6af0e47cc5ca" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:pv-binder-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim/status" I0125 05:11:56.233820 4678 audit.go:125] 2017-01-25T05:11:56.233772279-05:00 AUDIT: id="3f01f120-074d-4d5e-bc8a-635a5e935459" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints" I0125 05:11:56.234505 4678 audit.go:125] 2017-01-25T05:11:56.234467135-05:00 AUDIT: id="2f8ae98a-875c-4984-b0be-8bda874cbea4" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:11:56.235110 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:11:56.253905 4678 audit.go:45] 2017-01-25T05:11:56.253879761-05:00 AUDIT: id="3f01f120-074d-4d5e-bc8a-635a5e935459" response="201" I0125 05:11:56.254056 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints: (30.434271ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:11:56.254856 4678 audit.go:45] 2017-01-25T05:11:56.254844065-05:00 AUDIT: id="2f8ae98a-875c-4984-b0be-8bda874cbea4" response="404" I0125 05:11:56.254922 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (30.792788ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:11:56.255624 4678 audit.go:45] 2017-01-25T05:11:56.255604912-05:00 AUDIT: id="b3ced00a-9d5c-4123-8618-6af0e47cc5ca" response="200" I0125 05:11:56.255716 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim/status: (31.148156ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:pv-binder-controller] 172.18.7.222:50846] I0125 05:11:56.256586 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:11:56.257248 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. 
(76.905599ms) I0125 05:11:56.257387 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:11:56.257712 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:11:56.257740 4678 pv_controller.go:608] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" entered phase "Bound" I0125 05:11:56.257753 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:11:56.257773 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:11:56.257786 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:11:56.257863 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:11:56.257907 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:11:56.257917 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:11:56.257939 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:11:56.257965 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:11:56.257992 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:11:56.258000 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:11:56.258028 4678 pv_controller_base.go:603] storeObjectUpdate: ignoring claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" version 10935 I0125 05:11:56.261221 4678 audit.go:125] 2017-01-25T05:11:56.261145502-05:00 AUDIT: id="4b7d05a3-0809-4254-a87e-f58e46a3796a" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints" I0125 05:11:56.263188 4678 audit.go:45] 2017-01-25T05:11:56.263172233-05:00 AUDIT: id="4b7d05a3-0809-4254-a87e-f58e46a3796a" 
response="201" I0125 05:11:56.263262 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints: (4.600024ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:11:56.263772 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. (42.098415ms) I0125 05:11:56.263833 4678 pv_controller_base.go:603] storeObjectUpdate: ignoring claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" version 10940 I0125 05:11:56.263846 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:11:56.263863 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:11:56.263882 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:11:56.263891 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:11:56.263897 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:11:56.263909 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:11:56.263963 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:11:56.263971 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:11:56.263978 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:11:56.263983 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:11:56.264037 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:11:56.264047 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:11:56.264094 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:11:56.264102 4678 
pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:11:56.264118 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:11:56.264135 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:11:56.265044 4678 audit.go:125] 2017-01-25T05:11:56.265006614-05:00 AUDIT: id="dea3f78f-87eb-439c-a71b-70ad4036ca43" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs" I0125 05:11:56.268235 4678 audit.go:45] 2017-01-25T05:11:56.268219369-05:00 AUDIT: id="dea3f78f-87eb-439c-a71b-70ad4036ca43" response="201" I0125 05:11:56.268476 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs: (16.90858ms) 201 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40450] I0125 05:11:56.268724 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:56.291048 4678 factory.go:110] Adding deployment config "postgresql-master" I0125 05:11:56.293715 4678 audit.go:125] 2017-01-25T05:11:56.293602166-05:00 AUDIT: id="bfcbbd36-1c54-405f-8be4-58a340dad212" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:11:56.296070 4678 audit.go:125] 2017-01-25T05:11:56.295982657-05:00 AUDIT: id="16b6c81d-14f6-4006-ad78-0cf2f1105240" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs" I0125 05:11:56.300046 4678 audit.go:45] 2017-01-25T05:11:56.300029874-05:00 AUDIT: id="bfcbbd36-1c54-405f-8be4-58a340dad212" response="200" I0125 05:11:56.300277 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (7.139574ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.300791 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 1) I0125 05:11:56.301300 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:11:56.301925 4678 audit.go:125] 2017-01-25T05:11:56.301891653-05:00 AUDIT: id="bc216519-81e6-4098-b7b5-6e66488bf867" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/instantiate" I0125 05:11:56.303863 4678 audit.go:125] 2017-01-25T05:11:56.303836651-05:00 AUDIT: id="43a05190-3cdb-4ef2-8d17-c8d1a4cf92d0" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/postgresql" I0125 05:11:56.305361 4678 audit.go:45] 2017-01-25T05:11:56.305349082-05:00 AUDIT: id="43a05190-3cdb-4ef2-8d17-c8d1a4cf92d0" response="200" I0125 05:11:56.305650 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/postgresql: (2.026175ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.306037 4678 rest.go:84] New deployment for "postgresql-master" caused by []api.DeploymentCause{api.DeploymentCause{Type:"ImageChange", ImageTrigger:(*api.DeploymentCauseImageTrigger)(0xc431a598f0)}} I0125 05:11:56.308328 4678 audit.go:45] 2017-01-25T05:11:56.30831489-05:00 AUDIT: id="bc216519-81e6-4098-b7b5-6e66488bf867" response="201" I0125 05:11:56.308448 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/instantiate: (6.814909ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.309004 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:11:56.310603 4678 audit.go:125] 2017-01-25T05:11:56.310545088-05:00 AUDIT: id="33efe896-e150-4b2f-b548-8badb428e9f5" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:11:56.313298 4678 audit.go:45] 2017-01-25T05:11:56.313283441-05:00 AUDIT: id="16b6c81d-14f6-4006-ad78-0cf2f1105240" response="201" I0125 05:11:56.313541 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs: (23.695795ms) 201 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40450] I0125 05:11:56.314039 4678 factory.go:110] Adding deployment config "postgresql-slave" I0125 05:11:56.315109 4678 audit.go:125] 2017-01-25T05:11:56.315074617-05:00 AUDIT: id="fb906d20-d6f8-4671-958f-b6e0b5af7dd4" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status" I0125 05:11:56.317647 4678 audit.go:45] 2017-01-25T05:11:56.317633711-05:00 AUDIT: id="fb906d20-d6f8-4671-958f-b6e0b5af7dd4" response="200" I0125 05:11:56.317765 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status: (2.955461ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.318095 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" (observed generation: 1) I0125 05:11:56.318516 4678 factory.go:122] Updating deployment config "postgresql-slave" I0125 05:11:56.319062 4678 audit.go:125] 2017-01-25T05:11:56.31902756-05:00 AUDIT: id="5beaf9ce-3d80-467d-8609-2d7b14beb2f7" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" 
namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/instantiate" I0125 05:11:56.320842 4678 audit.go:125] 2017-01-25T05:11:56.320811989-05:00 AUDIT: id="ffea937b-e727-4778-8d50-ffd60879431f" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/postgresql" I0125 05:11:56.321923 4678 audit.go:45] 2017-01-25T05:11:56.321907896-05:00 AUDIT: id="ffea937b-e727-4778-8d50-ffd60879431f" response="200" I0125 05:11:56.322154 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/postgresql: (1.549765ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.326420 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:56.343581 4678 rest.go:84] New deployment for "postgresql-slave" caused by []api.DeploymentCause{api.DeploymentCause{Type:"ImageChange", ImageTrigger:(*api.DeploymentCauseImageTrigger)(0xc4352bb730)}} I0125 05:11:56.352487 4678 audit.go:45] 2017-01-25T05:11:56.352463896-05:00 AUDIT: id="5beaf9ce-3d80-467d-8609-2d7b14beb2f7" response="201" I0125 05:11:56.352719 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/instantiate: (33.904631ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.354542 4678 audit.go:45] 2017-01-25T05:11:56.354528425-05:00 AUDIT: id="33efe896-e150-4b2f-b548-8badb428e9f5" response="201" I0125 05:11:56.358228 4678 factory.go:122] Updating deployment config "postgresql-slave" I0125 05:11:56.360065 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 either never recorded expectations, or the ttl expired. 
I0125 05:11:56.360107 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 0->0 (need 0), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 0->1 I0125 05:11:56.364371 4678 audit.go:125] 2017-01-25T05:11:56.364324548-05:00 AUDIT: id="ee6e0f46-6dcb-4c52-a2d3-51543da9e46c" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:11:56.368717 4678 audit.go:125] 2017-01-25T05:11:56.368671926-05:00 AUDIT: id="5adfb4e0-4591-449b-b803-58e449cc4958" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:11:56.369284 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (58.969098ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.375393 4678 audit.go:125] 2017-01-25T05:11:56.37534752-05:00 AUDIT: id="801238ef-b988-425b-ad78-523d0c88ac88" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:11:56.377035 4678 audit.go:125] 2017-01-25T05:11:56.377002445-05:00 AUDIT: id="50323b1d-6741-4aa2-8010-00cb1d271d79" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:56.381499 4678 audit.go:45] 2017-01-25T05:11:56.381478998-05:00 AUDIT: id="801238ef-b988-425b-ad78-523d0c88ac88" response="200" I0125 05:11:56.381655 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (6.581423ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.393259 4678 audit.go:45] 2017-01-25T05:11:56.393231062-05:00 AUDIT: id="ee6e0f46-6dcb-4c52-a2d3-51543da9e46c" response="200" I0125 05:11:56.394406 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (33.370526ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:11:56.395977 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 2) I0125 05:11:56.396152 4678 factory.go:154] Replication controller "postgresql-master-1" updated. I0125 05:11:56.396613 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:11:56.397867 4678 kubelet.go:1138] Container garbage collection succeeded I0125 05:11:56.398183 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. 
Desired pod count change: 0->0 I0125 05:11:56.398370 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (38.324757ms) I0125 05:11:56.398411 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 either never recorded expectations, or the ttl expired. I0125 05:11:56.398436 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (33.618µs) I0125 05:11:56.400771 4678 audit.go:45] 2017-01-25T05:11:56.400737474-05:00 AUDIT: id="5adfb4e0-4591-449b-b803-58e449cc4958" response="201" I0125 05:11:56.401131 4678 audit.go:125] 2017-01-25T05:11:56.401076354-05:00 AUDIT: id="23f5e976-929c-47f1-9ed6-df6bc14b726d" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:11:56.401752 4678 admission.go:77] getting security context constraints for pod postgresql-master-1-deploy (generate: ) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{system:serviceaccount:openshift-infra:deploymentconfig-controller ca98311b-e2d9-11e6-a4b0-0e6a5cbf0094 [system:serviceaccounts system:serviceaccounts:openshift-infra system:authenticated] map[]} I0125 05:11:56.401791 4678 admission.go:88] getting security context constraints for pod postgresql-master-1-deploy (generate: ) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:11:56.403175 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (34.836046ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.403845 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 either never recorded expectations, or the ttl expired. 
I0125 05:11:56.403884 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1, replicas 0->0 (need 0), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 0->1 I0125 05:11:56.404918 4678 audit.go:125] 2017-01-25T05:11:56.404873677-05:00 AUDIT: id="3864b50d-6123-4d0d-8483-b85b15a41871" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:11:56.409433 4678 audit.go:45] 2017-01-25T05:11:56.409416012-05:00 AUDIT: id="50323b1d-6741-4aa2-8010-00cb1d271d79" response="201" I0125 05:11:56.409513 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (32.779074ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.410690 4678 audit.go:125] 2017-01-25T05:11:56.410637032-05:00 AUDIT: id="14c389af-8696-4b99-8f6a-a5306f03a714" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status" I0125 05:11:56.413801 4678 audit.go:45] 2017-01-25T05:11:56.413784564-05:00 AUDIT: id="14c389af-8696-4b99-8f6a-a5306f03a714" response="200" I0125 05:11:56.414947 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status: (9.027605ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:11:56.415472 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (11.634355ms) I0125 05:11:56.415655 4678 replication_controller.go:322] Observed updated replication controller postgresql-slave-1. Desired pod count change: 0->0 I0125 05:11:56.415713 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 either never recorded expectations, or the ttl expired. 
I0125 05:11:56.415743 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (38.688µs) I0125 05:11:56.417378 4678 audit.go:45] 2017-01-25T05:11:56.417362433-05:00 AUDIT: id="3864b50d-6123-4d0d-8483-b85b15a41871" response="200" I0125 05:11:56.417438 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (12.840401ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.426111 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:56.443932 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:11:56.443972 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:11:56.443978 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:11:56.444009 4678 admission.go:149] validating pod postgresql-master-1-deploy (generate: ) against providers restricted I0125 05:11:56.444079 4678 admission.go:116] pod postgresql-master-1-deploy (generate: ) validated against provider restricted I0125 05:11:56.445172 4678 factory.go:154] Replication controller "postgresql-slave-1" updated. I0125 05:11:56.449165 4678 audit.go:125] 2017-01-25T05:11:56.449125147-05:00 AUDIT: id="3dbfb0e3-291c-443f-98ee-ec2b6029ccf2" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status" I0125 05:11:56.450852 4678 audit.go:125] 2017-01-25T05:11:56.450823901-05:00 AUDIT: id="c5e77db8-f2cf-4cb7-b0f7-bf532c39e0e4" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:56.454099 4678 factory.go:488] About to try and schedule pod postgresql-master-1-deploy I0125 05:11:56.454113 4678 scheduler.go:93] Attempting to schedule pod: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy I0125 05:11:56.454523 4678 audit.go:45] 2017-01-25T05:11:56.4545066-05:00 AUDIT: id="23f5e976-929c-47f1-9ed6-df6bc14b726d" response="201" I0125 05:11:56.454623 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (80.513475ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:56.455152 4678 factory.go:648] Attempting to bind postgresql-master-1-deploy to 172.18.7.222 I0125 05:11:56.458575 4678 controller.go:128] Created deployer pod postgresql-master-1-deploy for deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 I0125 05:11:56.460052 4678 audit.go:45] 2017-01-25T05:11:56.460032361-05:00 AUDIT: id="3dbfb0e3-291c-443f-98ee-ec2b6029ccf2" response="200" I0125 05:11:56.460224 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status: (11.309895ms) 
200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.460823 4678 audit.go:125] 2017-01-25T05:11:56.460786824-05:00 AUDIT: id="61033134-e179-4703-a1c6-b30b0ede68ec" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings" I0125 05:11:56.461887 4678 factory.go:122] Updating deployment config "postgresql-slave" I0125 05:11:56.462084 4678 replication_controller.go:255] No controllers found for pod postgresql-master-1-deploy, replication manager will avoid syncing I0125 05:11:56.462109 4678 replica_set.go:288] Pod postgresql-master-1-deploy created: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-1-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy", UID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"10956", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935916, nsec:444439356, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-master-1"}, Annotations:map[string]string{"openshift.io/deployment.name":"postgresql-master-1", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc43511f8c0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN 
CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-master-1", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc43511f980), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc431814690), ActiveDeadlineSeconds:(*int64)(0xc431814698), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"", SecurityContext:(*api.PodSecurityContext)(0xc437901f40), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Pending", Conditions:[]api.PodCondition(nil), Message:"", Reason:"", HostIP:"", PodIP:"", StartTime:(*unversioned.Time)(nil), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus(nil)}}. I0125 05:11:56.462382 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-deploy, ReplicaSet controller will avoid syncing I0125 05:11:56.462464 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-deploy, job controller will avoid syncing I0125 05:11:56.462488 4678 daemoncontroller.go:309] Pod postgresql-master-1-deploy added. I0125 05:11:56.462545 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-deploy, daemon set controller will avoid syncing I0125 05:11:56.462567 4678 disruption.go:314] addPod called on pod "postgresql-master-1-deploy" I0125 05:11:56.462601 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-deploy, PodDisruptionBudget controller will avoid syncing. 
I0125 05:11:56.462606 4678 disruption.go:317] No matching pdb for pod "postgresql-master-1-deploy" I0125 05:11:56.462657 4678 pet_set.go:160] Pod postgresql-master-1-deploy created, labels: map[openshift.io/deployer-pod-for.name:postgresql-master-1] I0125 05:11:56.462703 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-deploy, StatefulSet controller will avoid syncing I0125 05:11:56.464120 4678 audit.go:125] 2017-01-25T05:11:56.464088755-05:00 AUDIT: id="92b4f814-da23-485a-8dc3-6e0d4cb0d825" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:11:56.464551 4678 audit.go:45] 2017-01-25T05:11:56.464539471-05:00 AUDIT: id="92b4f814-da23-485a-8dc3-6e0d4cb0d825" response="200" I0125 05:11:56.464863 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (985.477µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:56.467408 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" (observed generation: 2) I0125 05:11:56.468103 4678 audit.go:45] 2017-01-25T05:11:56.468088716-05:00 AUDIT: id="61033134-e179-4703-a1c6-b30b0ede68ec" response="201" I0125 05:11:56.468157 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings: (7.668566ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.468905 4678 audit.go:45] 2017-01-25T05:11:56.468892193-05:00 AUDIT: id="c5e77db8-f2cf-4cb7-b0f7-bf532c39e0e4" response="201" I0125 05:11:56.468965 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (18.328487ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.470097 4678 config.go:281] Setting pods for source api I0125 05:11:56.470790 4678 config.go:397] Receiving a new pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.470858 4678 kubelet.go:1781] SyncLoop (ADD, "api"): "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.471412 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.472332 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.473026 4678 audit.go:125] 2017-01-25T05:11:56.472990807-05:00 AUDIT: id="a7af6da7-eb8d-4c4e-b7aa-30b83d8728a8" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy" I0125 05:11:56.474506 4678 replication_controller.go:378] Pod postgresql-master-1-deploy updated, objectMeta {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10956 
Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10959 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:11:56.474623 4678 replication_controller.go:255] No controllers found for pod postgresql-master-1-deploy, replication manager will avoid syncing I0125 05:11:56.474641 4678 replica_set.go:320] Pod postgresql-master-1-deploy updated, objectMeta {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10956 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10959 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:11:56.474717 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-deploy, ReplicaSet controller will avoid syncing I0125 05:11:56.474741 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-deploy, job controller will avoid syncing I0125 05:11:56.474757 4678 daemoncontroller.go:332] Pod postgresql-master-1-deploy updated. I0125 05:11:56.474778 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-deploy, daemon set controller will avoid syncing I0125 05:11:56.474793 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-deploy" I0125 05:11:56.474805 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-deploy, PodDisruptionBudget controller will avoid syncing. 
I0125 05:11:56.474810 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-deploy" I0125 05:11:56.474862 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-deploy, StatefulSet controller will avoid syncing I0125 05:11:56.475416 4678 audit.go:125] 2017-01-25T05:11:56.475381818-05:00 AUDIT: id="0d27a96c-9ccf-4998-9fd4-35b96c007c58" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:56.477477 4678 audit.go:125] 2017-01-25T05:11:56.477432952-05:00 AUDIT: id="11cf4d88-fc0a-4369-8dd1-e9e6ef06c2e4" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:11:56.478065 4678 audit.go:45] 2017-01-25T05:11:56.478052556-05:00 AUDIT: id="11cf4d88-fc0a-4369-8dd1-e9e6ef06c2e4" response="409" I0125 05:11:56.478125 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (12.677826ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:56.478261 4678 audit.go:125] 2017-01-25T05:11:56.478227181-05:00 AUDIT: id="d2e753a2-1b85-4a64-a5bd-d316d2196b61" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:11:56.478596 4678 admission.go:77] getting security context constraints for pod postgresql-slave-1-deploy (generate: ) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{system:serviceaccount:openshift-infra:deploymentconfig-controller ca98311b-e2d9-11e6-a4b0-0e6a5cbf0094 [system:serviceaccounts system:serviceaccounts:openshift-infra system:authenticated] map[]} I0125 05:11:56.478633 4678 admission.go:88] getting security context constraints for pod postgresql-slave-1-deploy (generate: ) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:11:56.479478 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:11:56.498302 4678 controller.go:155] Detected existing deployer pod postgresql-master-1-deploy for deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 I0125 05:11:56.500453 4678 audit.go:125] 2017-01-25T05:11:56.500405139-05:00 AUDIT: id="7555a2e8-b1ae-4871-b095-0feca097fd8a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:11:56.504010 4678 audit.go:45] 2017-01-25T05:11:56.503985657-05:00 AUDIT: id="a7af6da7-eb8d-4c4e-b7aa-30b83d8728a8" response="200" I0125 05:11:56.504324 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy: 
(31.592133ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:56.508509 4678 audit.go:125] 2017-01-25T05:11:56.508369703-05:00 AUDIT: id="75dde277-fd10-4609-b457-1786b82c97bb" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy/status" I0125 05:11:56.509822 4678 audit.go:45] 2017-01-25T05:11:56.509799048-05:00 AUDIT: id="7555a2e8-b1ae-4871-b095-0feca097fd8a" response="200" I0125 05:11:56.509973 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (9.908001ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.510881 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:11:56.510908 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:11:56.510925 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:11:56.510959 4678 admission.go:149] validating pod postgresql-slave-1-deploy (generate: ) against providers restricted I0125 05:11:56.511067 4678 admission.go:116] pod postgresql-slave-1-deploy (generate: ) validated against provider restricted I0125 05:11:56.511947 4678 audit.go:45] 2017-01-25T05:11:56.511920239-05:00 AUDIT: id="0d27a96c-9ccf-4998-9fd4-35b96c007c58" response="201" I0125 05:11:56.512131 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (36.983764ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.513877 4678 audit.go:125] 2017-01-25T05:11:56.513781009-05:00 AUDIT: id="d3950e1f-381d-4f2b-8ca4-7468c49c06b3" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:11:56.516320 4678 audit.go:45] 2017-01-25T05:11:56.516299383-05:00 AUDIT: id="75dde277-fd10-4609-b457-1786b82c97bb" response="200" I0125 05:11:56.516536 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy/status: (8.524055ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:56.517043 4678 audit.go:45] 2017-01-25T05:11:56.517022982-05:00 AUDIT: id="d2e753a2-1b85-4a64-a5bd-d316d2196b61" response="201" I0125 05:11:56.517261 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (69.709653ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:56.521174 4678 controller.go:128] Created deployer pod postgresql-slave-1-deploy for deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 I0125 05:11:56.522555 4678 config.go:281] Setting pods for source api I0125 05:11:56.523243 4678 factory.go:488] About to try and schedule pod 
postgresql-slave-1-deploy I0125 05:11:56.523255 4678 scheduler.go:93] Attempting to schedule pod: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy I0125 05:11:56.523992 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.524076 4678 audit.go:45] 2017-01-25T05:11:56.524049761-05:00 AUDIT: id="d3950e1f-381d-4f2b-8ca4-7468c49c06b3" response="200" I0125 05:11:56.524252 4678 status_manager.go:425] Status for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Pending Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [deployment]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc432769900 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting:0xc4327698e0 Running: Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID: ContainerID:}]} version:1 podName:postgresql-master-1-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:11:56.527373 4678 audit.go:125] 2017-01-25T05:11:56.527326223-05:00 AUDIT: id="c7fc9121-342a-473b-8caa-de51fcf8b4a5" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:11:56.531016 4678 audit.go:45] 2017-01-25T05:11:56.530999302-05:00 AUDIT: id="c7fc9121-342a-473b-8caa-de51fcf8b4a5" response="200" I0125 05:11:56.531276 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (6.498476ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:56.531651 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 status from New to Pending (scale: 0) I0125 05:11:56.534852 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. Desired pod count change: 0->0 I0125 05:11:56.534895 4678 replication_controller.go:322] Observed updated replication controller postgresql-slave-1. Desired pod count change: 0->0 I0125 05:11:56.534948 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 either never recorded expectations, or the ttl expired. 
I0125 05:11:56.534987 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (53.438µs) I0125 05:11:56.535019 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 either never recorded expectations, or the ttl expired. I0125 05:11:56.535036 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (21.249µs) I0125 05:11:56.536340 4678 factory.go:154] Replication controller "postgresql-master-1" updated. I0125 05:11:56.536377 4678 factory.go:154] Replication controller "postgresql-slave-1" updated. E0125 05:11:56.537754 4678 controller.go:169] Failing deployment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" because its deployer pod "postgresql-slave-1-deploy" disappeared I0125 05:11:56.539732 4678 audit.go:125] 2017-01-25T05:11:56.539680977-05:00 AUDIT: id="9d60d32c-0cfb-4443-9017-ff9c609e87c9" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:11:56.541464 4678 audit.go:125] 2017-01-25T05:11:56.541422084-05:00 AUDIT: id="8cfe6c32-12c9-44b4-a707-c8a3e647cc83" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status" I0125 05:11:56.547772 4678 audit.go:45] 2017-01-25T05:11:56.547752366-05:00 AUDIT: id="9d60d32c-0cfb-4443-9017-ff9c609e87c9" response="200" I0125 05:11:56.547961 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (8.559384ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.548323 4678 audit.go:45] 2017-01-25T05:11:56.548310022-05:00 AUDIT: id="8cfe6c32-12c9-44b4-a707-c8a3e647cc83" response="200" I0125 05:11:56.548462 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status: (7.296117ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.550597 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 2) I0125 05:11:56.550757 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" (observed generation: 2) I0125 05:11:56.551872 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:11:56.551890 4678 factory.go:122] Updating deployment config "postgresql-slave" I0125 05:11:56.553508 4678 audit.go:125] 2017-01-25T05:11:56.553459252-05:00 AUDIT: id="aa41965f-c352-49ea-9f72-eee3b971c20a" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:11:56.555117 4678 audit.go:125] 2017-01-25T05:11:56.555084861-05:00 AUDIT: id="e2c904d4-798b-4263-8067-28bdf59e1904" 
ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:56.558335 4678 audit.go:45] 2017-01-25T05:11:56.5583201-05:00 AUDIT: id="aa41965f-c352-49ea-9f72-eee3b971c20a" response="200" I0125 05:11:56.559275 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (16.211583ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:56.559880 4678 factory.go:154] Replication controller "postgresql-slave-1" updated. I0125 05:11:56.561290 4678 replication_controller.go:322] Observed updated replication controller postgresql-slave-1. Desired pod count change: 0->0 I0125 05:11:56.561354 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 either never recorded expectations, or the ttl expired. I0125 05:11:56.561399 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (53.375µs) I0125 05:11:56.561597 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 status from Pending to Failed (scale: 0) I0125 05:11:56.563119 4678 audit.go:125] 2017-01-25T05:11:56.563066984-05:00 AUDIT: id="e909d56a-95d3-4e9c-ac85-086b70bad4df" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status" I0125 05:11:56.564926 4678 audit.go:45] 2017-01-25T05:11:56.564911212-05:00 AUDIT: id="e2c904d4-798b-4263-8067-28bdf59e1904" response="201" I0125 05:11:56.564987 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (21.286095ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:56.566553 4678 audit.go:45] 2017-01-25T05:11:56.566536629-05:00 AUDIT: id="e909d56a-95d3-4e9c-ac85-086b70bad4df" response="200" I0125 05:11:56.566673 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status: (3.920202ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.567274 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" (observed generation: 2) I0125 05:11:56.568092 4678 factory.go:122] Updating deployment config "postgresql-slave" I0125 05:11:56.569231 4678 factory.go:648] Attempting to bind postgresql-slave-1-deploy to 172.18.7.222 I0125 05:11:56.570050 4678 audit.go:125] 2017-01-25T05:11:56.570013675-05:00 AUDIT: id="92ab8716-dee4-4553-a6c8-67e16b15b60a" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings" I0125 05:11:56.572427 4678 audit.go:45] 2017-01-25T05:11:56.572412999-05:00 AUDIT: id="92ab8716-dee4-4553-a6c8-67e16b15b60a" 
response="201" I0125 05:11:56.572482 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings: (2.745308ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.573112 4678 config.go:281] Setting pods for source api I0125 05:11:56.573915 4678 config.go:397] Receiving a new pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.574042 4678 kubelet.go:1781] SyncLoop (ADD, "api"): "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.574164 4678 kubelet_pods.go:1029] Generating status for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.574767 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.578413 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/secret/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:11:56.580347 4678 audit.go:125] 2017-01-25T05:11:56.580304404-05:00 AUDIT: id="b22eb43e-c085-4683-98c9-f65a112eb4da" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:56.581603 4678 audit.go:125] 2017-01-25T05:11:56.581570233-05:00 AUDIT: id="72e2319b-e30a-4ca8-ac44-04f7a3fd0bd5" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy" I0125 05:11:56.583745 4678 audit.go:45] 2017-01-25T05:11:56.583729601-05:00 AUDIT: id="72e2319b-e30a-4ca8-ac44-04f7a3fd0bd5" response="200" I0125 05:11:56.583854 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy: (2.517189ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:56.585526 4678 audit.go:45] 2017-01-25T05:11:56.585512067-05:00 AUDIT: id="b22eb43e-c085-4683-98c9-f65a112eb4da" response="201" I0125 05:11:56.585577 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (5.585979ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.586351 4678 audit.go:125] 2017-01-25T05:11:56.586309382-05:00 AUDIT: id="ba5a1b53-340b-49e1-a547-1d0ece7accb9" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy/status" I0125 05:11:56.588842 4678 audit.go:45] 2017-01-25T05:11:56.588827997-05:00 AUDIT: id="ba5a1b53-340b-49e1-a547-1d0ece7accb9" response="200" I0125 05:11:56.588930 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy/status: (2.902496ms) 
200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:56.589472 4678 replication_controller.go:378] Pod postgresql-master-1-deploy updated, objectMeta {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10959 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10961 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:11:56.589665 4678 replication_controller.go:255] No controllers found for pod postgresql-master-1-deploy, replication manager will avoid syncing I0125 05:11:56.589732 4678 replication_controller.go:255] No controllers found for pod postgresql-slave-1-deploy, replication manager will avoid syncing I0125 05:11:56.589746 4678 replication_controller.go:378] Pod postgresql-slave-1-deploy updated, objectMeta {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10962 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10970 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-slave-1] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:11:56.589857 4678 replication_controller.go:255] No controllers found for pod postgresql-slave-1-deploy, replication manager will avoid syncing I0125 05:11:56.589876 4678 replication_controller.go:378] Pod postgresql-slave-1-deploy updated, objectMeta {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10970 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-slave-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10972 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:11:56.590029 4678 replication_controller.go:255] No controllers found for pod postgresql-slave-1-deploy, replication manager will avoid syncing I0125 05:11:56.590005 4678 status_manager.go:425] Status for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Pending Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [deployment]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc430d2f780 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting:0xc430d2f760 Running: Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID: ContainerID:}]} version:1 podName:postgresql-slave-1-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:11:56.590078 4678 replica_set.go:320] Pod postgresql-master-1-deploy updated, objectMeta {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10959 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> 
{Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10961 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:11:56.590167 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-deploy, ReplicaSet controller will avoid syncing I0125 05:11:56.590177 4678 replica_set.go:288] Pod postgresql-slave-1-deploy created: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-slave-1-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy", UID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"10962", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935916, nsec:511595799, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-slave-1"}, Annotations:map[string]string{"openshift.io/deployment.name":"postgresql-slave-1", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42cc62e40), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", 
Value:"-----BEGIN CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-slave-1", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42cc62f00), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc42d4e1600), ActiveDeadlineSeconds:(*int64)(0xc42d4e1608), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"", SecurityContext:(*api.PodSecurityContext)(0xc432cf1580), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Pending", Conditions:[]api.PodCondition(nil), Message:"", Reason:"", HostIP:"", PodIP:"", StartTime:(*unversioned.Time)(nil), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus(nil)}}. 
I0125 05:11:56.590516 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-deploy, ReplicaSet controller will avoid syncing I0125 05:11:56.590535 4678 config.go:281] Setting pods for source api I0125 05:11:56.590531 4678 replica_set.go:320] Pod postgresql-slave-1-deploy updated, objectMeta {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10962 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10970 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:11:56.590612 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-deploy, ReplicaSet controller will avoid syncing I0125 05:11:56.590623 4678 replica_set.go:320] Pod postgresql-slave-1-deploy updated, objectMeta {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10970 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10972 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:11:56.590702 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-deploy, ReplicaSet controller will avoid syncing I0125 05:11:56.590748 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-deploy, job controller will avoid syncing I0125 05:11:56.590779 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-deploy, job controller will avoid syncing I0125 05:11:56.590799 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-deploy, job controller will avoid syncing I0125 05:11:56.590812 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-deploy, job controller will avoid syncing I0125 05:11:56.590832 4678 daemoncontroller.go:332] Pod postgresql-master-1-deploy updated. I0125 05:11:56.590855 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-deploy, daemon set controller will avoid syncing I0125 05:11:56.590864 4678 daemoncontroller.go:309] Pod postgresql-slave-1-deploy added. I0125 05:11:56.590885 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-deploy, daemon set controller will avoid syncing I0125 05:11:56.590901 4678 daemoncontroller.go:332] Pod postgresql-slave-1-deploy updated. I0125 05:11:56.590921 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-deploy, daemon set controller will avoid syncing I0125 05:11:56.590930 4678 daemoncontroller.go:332] Pod postgresql-slave-1-deploy updated. I0125 05:11:56.590945 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-deploy, daemon set controller will avoid syncing I0125 05:11:56.590976 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-deploy" I0125 05:11:56.591007 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:11:56.591014 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-deploy" I0125 05:11:56.591022 4678 disruption.go:314] addPod called on pod "postgresql-slave-1-deploy" I0125 05:11:56.591032 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:11:56.591038 4678 disruption.go:317] No matching pdb for pod "postgresql-slave-1-deploy" I0125 05:11:56.591047 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-deploy" I0125 05:11:56.591072 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:11:56.591079 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-deploy" I0125 05:11:56.591086 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-deploy" I0125 05:11:56.591097 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-deploy, PodDisruptionBudget controller will avoid syncing. 
I0125 05:11:56.591102 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-deploy" I0125 05:11:56.591252 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-deploy, StatefulSet controller will avoid syncing I0125 05:11:56.591263 4678 pet_set.go:160] Pod postgresql-slave-1-deploy created, labels: map[openshift.io/deployer-pod-for.name:postgresql-slave-1] I0125 05:11:56.591272 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-deploy, StatefulSet controller will avoid syncing I0125 05:11:56.591284 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-deploy, StatefulSet controller will avoid syncing I0125 05:11:56.591293 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-deploy, StatefulSet controller will avoid syncing I0125 05:11:56.591568 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.591684 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (87.224389ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:56.591993 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 status from New to Pending (scale: 0) I0125 05:11:56.610777 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:11:56.627435 4678 namespace_controller.go:197] Namespace has been deleted extended-test-cakephp-mysql-repo-test-4jx76-x2n7w I0125 05:11:56.627477 4678 namespace_controller.go:198] Finished syncing namespace "extended-test-cakephp-mysql-repo-test-4jx76-x2n7w" (646ns) I0125 05:11:56.643842 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:11:56.679437 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:11:56.696740 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/secret/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") to pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:11:56.696815 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/secret/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:11:56.697019 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:11:56.697600 4678 empty_dir.go:248] pod b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094: mounting tmpfs for volume wrapped_deployer-token-r7jj8 I0125 05:11:56.697617 4678 mount_linux.go:112] Mounting tmpfs /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 tmpfs [] with command: "mount" I0125 05:11:56.697626 4678 mount_linux.go:115] Mounting cmd (mount) with arguments ([-t tmpfs tmpfs /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8]) I0125 05:11:56.760879 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment 
default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:11:56.760922 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:11:56.782768 4678 audit.go:125] 2017-01-25T05:11:56.782724615-05:00 AUDIT: id="53a9095d-81e3-4f0a-aa55-e79063535c6e" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:11:56.786136 4678 audit.go:125] 2017-01-25T05:11:56.786099599-05:00 AUDIT: 
id="ebfd6b98-73a5-4fb7-a467-4ec6d21f70f7" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:11:56.789521 4678 audit.go:45] 2017-01-25T05:11:56.789503905-05:00 AUDIT: id="53a9095d-81e3-4f0a-aa55-e79063535c6e" response="200" I0125 05:11:56.790770 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (8.422764ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:56.791848 4678 audit.go:45] 2017-01-25T05:11:56.791834417-05:00 AUDIT: id="ebfd6b98-73a5-4fb7-a467-4ec6d21f70f7" response="200" I0125 05:11:56.792008 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (6.190941ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:56.792941 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:11:56.793924 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:11:56.794057 4678 atomic_writer.go:145] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy volume deployer-token-r7jj8: write required for target directory /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:11:56.796660 4678 atomic_writer.go:160] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy volume deployer-token-r7jj8: performed write of new data to ts data directory: /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8/..1981_25_01_05_11_56.199715798 I0125 05:11:56.797138 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:11:56.797390 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/secret/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") to pod "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:11:56.797444 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:11:56.797724 4678 empty_dir.go:248] pod b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094: mounting tmpfs for volume wrapped_deployer-token-r7jj8 I0125 05:11:56.797737 4678 mount_linux.go:112] Mounting tmpfs /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 tmpfs [] with command: "mount" I0125 05:11:56.797745 4678 mount_linux.go:115] Mounting cmd (mount) with arguments ([-t tmpfs tmpfs /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8]) I0125 05:11:56.802133 4678 generic.go:145] GenericPLEG: 98639e6c-e2e6-11e6-a4b0-0e6a5cbf0094/1b1019c4dbb189263f336af5eb9e4774791c819a70507030c714a837d9c04a3a: exited -> non-existent I0125 05:11:56.802165 4678 generic.go:145] GenericPLEG: 99f51a19-e2e6-11e6-a4b0-0e6a5cbf0094/3898d0a93eeac7342b1d1f67b3451372ea7edf63c3311b6132453843666e822f: exited -> non-existent I0125 05:11:56.802174 4678 generic.go:145] GenericPLEG: 99f51a19-e2e6-11e6-a4b0-0e6a5cbf0094/f29a22668a071e0d1e50ece55052402a83e4fee1353aff2077cc71982b710a21: exited -> non-existent I0125 05:11:56.802182 4678 generic.go:333] PLEG: Delete status for pod "99f51a19-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:11:56.802226 4678 generic.go:333] PLEG: Delete status for pod "98639e6c-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:11:56.828179 4678 audit.go:125] 2017-01-25T05:11:56.828107631-05:00 AUDIT: id="f9346928-2d23-42bf-b096-bd2277076a65" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/processedtemplates" I0125 05:11:56.830652 4678 audit.go:45] 2017-01-25T05:11:56.830639936-05:00 AUDIT: id="f9346928-2d23-42bf-b096-bd2277076a65" response="201" I0125 05:11:56.831385 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/processedtemplates: (6.354122ms) 201 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:40456] I0125 05:11:56.860425 4678 audit.go:125] 2017-01-25T05:11:56.860308416-05:00 AUDIT: id="25012bda-39e0-4ea4-8cf0-c72f6e850716" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:11:56.863372 4678 audit.go:45] 2017-01-25T05:11:56.863351449-05:00 AUDIT: id="25012bda-39e0-4ea4-8cf0-c72f6e850716" response="201" I0125 05:11:56.863496 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (12.97427ms) 201 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40456] I0125 05:11:56.868525 4678 audit.go:125] 2017-01-25T05:11:56.868479313-05:00 AUDIT: id="9b2ff693-88a7-4c67-a0e1-ea48f31195b0" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services" I0125 05:11:56.871303 4678 audit.go:125] 2017-01-25T05:11:56.87125431-05:00 
AUDIT: id="b4ec713d-f05e-4199-9fa3-6c3b54abb208" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:11:56.872463 4678 audit.go:45] 2017-01-25T05:11:56.872448399-05:00 AUDIT: id="b4ec713d-f05e-4199-9fa3-6c3b54abb208" response="200" I0125 05:11:56.872670 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (1.692862ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:56.872847 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:11:56.872854 4678 rest.go:568] Service type: ClusterIP does not need health check node port I0125 05:11:56.872928 4678 atomic_writer.go:145] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy volume deployer-token-r7jj8: write required for target directory /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:11:56.873334 4678 atomic_writer.go:160] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy volume deployer-token-r7jj8: performed write of new data to ts data directory: /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8/..1981_25_01_05_11_56.857893693 I0125 05:11:56.873696 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:11:56.874000 4678 audit.go:45] 2017-01-25T05:11:56.873986816-05:00 AUDIT: id="9b2ff693-88a7-4c67-a0e1-ea48f31195b0" response="201" I0125 05:11:56.874110 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services: (7.287364ms) 201 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40456] I0125 05:11:56.874377 4678 secret_creating_controller.go:98] Adding service postgresql-helper I0125 05:11:56.874752 4678 config.go:274] Service handler already has a pending interrupt. 
I0125 05:11:56.876306 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.876484 4678 proxier.go:797] syncProxyRules took 696.026089ms I0125 05:11:56.876505 4678 proxier.go:431] OnServiceUpdate took 696.086771ms for 3 services I0125 05:11:56.876532 4678 config.go:208] Calling handler.OnServiceUpdate() I0125 05:11:56.876547 4678 proxier.go:381] Received update notice: [] I0125 05:11:56.876564 4678 proxier.go:471] Adding new service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql" at 172.30.122.147:5432/TCP I0125 05:11:56.876592 4678 proxier.go:495] Deleting health check for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper I0125 05:11:56.876670 4678 proxier.go:501] added serviceInfo(extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql): (*iptables.serviceInfo)(0xc42e386a50)({ clusterIP: (net.IP) (len=16 cap=16) 172.30.122.147, port: (int) 5432, protocol: (api.Protocol) (len=3) "TCP", nodePort: (int) 0, loadBalancerStatus: (api.LoadBalancerStatus) { Ingress: ([]api.LoadBalancerIngress) { } }, sessionAffinityType: (api.ServiceAffinity) (len=4) "None", stickyMaxAgeMinutes: (int) 180, externalIPs: ([]string) , loadBalancerSourceRanges: ([]string) , onlyNodeLocalEndpoints: (bool) false, healthCheckNodePort: (int) 0 }) I0125 05:11:56.876705 4678 proxier.go:804] Syncing iptables rules I0125 05:11:56.876717 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:11:56.889775 4678 audit.go:125] 2017-01-25T05:11:56.889719945-05:00 AUDIT: id="f7ebc7bd-b0a3-438b-9067-9d41f8ad11de" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:11:56.894491 4678 audit.go:45] 2017-01-25T05:11:56.89445995-05:00 AUDIT: id="f7ebc7bd-b0a3-438b-9067-9d41f8ad11de" response="200" I0125 05:11:56.895093 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (5.714645ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:56.896495 4678 docker_manager.go:1947] Need to restart pod infra container for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" because it is not found I0125 05:11:56.896579 4678 docker_manager.go:1992] Container {Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq 
W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-slave-1 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc42c5c8630 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it. I0125 05:11:56.896627 4678 docker_manager.go:2086] Got container changes for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:true InfraChanged:false InfraContainerId: InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[0:Container {Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-slave-1 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc42c5c8630 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it.] 
ContainersToKeep:map[]} I0125 05:11:56.896669 4678 docker_manager.go:2095] Killing Infra Container for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)", will start new one I0125 05:11:56.896691 4678 docker_manager.go:2153] Creating pod infra container for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:56.898005 4678 audit.go:125] 2017-01-25T05:11:56.897956131-05:00 AUDIT: id="f422d236-be3d-456d-af6a-b61dd1415ca5" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs" I0125 05:11:56.900221 4678 audit.go:125] 2017-01-25T05:11:56.900151564-05:00 AUDIT: id="206be3ec-cde8-4d5b-8090-8e33636dbdd6" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:11:56.903150 4678 audit.go:45] 2017-01-25T05:11:56.903135007-05:00 AUDIT: id="f422d236-be3d-456d-af6a-b61dd1415ca5" response="201" I0125 05:11:56.903415 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs: (14.573602ms) 201 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40456] I0125 05:11:56.903686 4678 audit.go:45] 2017-01-25T05:11:56.903674216-05:00 AUDIT: id="206be3ec-cde8-4d5b-8090-8e33636dbdd6" response="404" I0125 05:11:56.903741 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (13.005527ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:11:56.904074 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 0 not ready: 0 I0125 05:11:56.904561 4678 factory.go:110] Adding deployment config "postgresql-helper" I0125 05:11:56.905509 4678 audit.go:125] 2017-01-25T05:11:56.905465336-05:00 AUDIT: id="94a393c3-c2f5-418b-aee2-ceebd7c8bae6" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:11:56.907786 4678 audit.go:125] 2017-01-25T05:11:56.907749603-05:00 AUDIT: id="9a1f9104-7b38-4a30-a5b6-218504b77e8f" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints" I0125 05:11:56.909103 4678 audit.go:45] 2017-01-25T05:11:56.909089572-05:00 AUDIT: id="94a393c3-c2f5-418b-aee2-ceebd7c8bae6" response="200" I0125 05:11:56.909261 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (4.084073ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.909641 4678 factory.go:122] Updating deployment config 
"postgresql-helper" I0125 05:11:56.909795 4678 audit.go:45] 2017-01-25T05:11:56.909782459-05:00 AUDIT: id="9a1f9104-7b38-4a30-a5b6-218504b77e8f" response="201" I0125 05:11:56.909848 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints: (4.102097ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:11:56.910146 4678 audit.go:125] 2017-01-25T05:11:56.910111978-05:00 AUDIT: id="d61d86ee-77b4-464e-b2f2-dd494958947f" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/instantiate" I0125 05:11:56.910394 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 1) I0125 05:11:56.910567 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (35.560318ms) I0125 05:11:56.910767 4678 config.go:165] Endpoints handler already has a pending interrupt. I0125 05:11:56.912524 4678 audit.go:125] 2017-01-25T05:11:56.91249471-05:00 AUDIT: id="ab7babe9-1d7b-48f8-b46c-80e2c81cb40a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/postgresql" I0125 05:11:56.913788 4678 audit.go:45] 2017-01-25T05:11:56.913775648-05:00 AUDIT: id="ab7babe9-1d7b-48f8-b46c-80e2c81cb40a" response="200" I0125 05:11:56.914033 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/postgresql: (1.775588ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.914772 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy/POD podIP: "" creating hosts mount: false I0125 05:11:56.915009 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:11:56.934222 4678 rest.go:84] New deployment for "postgresql-helper" caused by []api.DeploymentCause{api.DeploymentCause{Type:"ImageChange", ImageTrigger:(*api.DeploymentCauseImageTrigger)(0xc42dad58f0)}} I0125 05:11:56.939417 4678 audit.go:45] 2017-01-25T05:11:56.939395224-05:00 AUDIT: id="d61d86ee-77b4-464e-b2f2-dd494958947f" response="201" I0125 05:11:56.939611 4678 panics.go:76] POST /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/instantiate: (29.730276ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:56.940402 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:11:56.943006 4678 audit.go:125] 2017-01-25T05:11:56.942961141-05:00 AUDIT: id="96445d0c-9cf1-4d13-9afa-9862d33269ad" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:11:56.943645 4678 audit.go:125] 2017-01-25T05:11:56.943571749-05:00 AUDIT: id="3d92cdc5-5411-4315-b1bf-0e8cbf6dc21f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-helper" I0125 05:11:56.948514 4678 audit.go:45] 2017-01-25T05:11:56.948488676-05:00 AUDIT: id="3d92cdc5-5411-4315-b1bf-0e8cbf6dc21f" response="200" I0125 05:11:56.948612 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-helper: (11.273809ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:11:56.951194 4678 audit.go:125] 2017-01-25T05:11:56.95114566-05:00 AUDIT: id="ffa2ea37-fde0-4e65-8faf-ee021075d528" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-helper&resourceVersion=10980" I0125 05:11:56.951769 4678 audit.go:45] 2017-01-25T05:11:56.951758634-05:00 AUDIT: id="ffa2ea37-fde0-4e65-8faf-ee021075d528" response="200" I0125 05:11:56.952133 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:56.972021 4678 audit.go:45] 2017-01-25T05:11:56.971995642-05:00 AUDIT: id="96445d0c-9cf1-4d13-9afa-9862d33269ad" response="201" I0125 05:11:56.974463 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (31.747058ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:56.975142 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 either never recorded expectations, or the ttl expired. 
I0125 05:11:56.975213 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1, replicas 0->0 (need 0), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 0->1 I0125 05:11:56.983817 4678 audit.go:125] 2017-01-25T05:11:56.983767548-05:00 AUDIT: id="7de01441-c7d5-4408-bcae-efae30d21f6a" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status" I0125 05:11:56.985088 4678 audit.go:125] 2017-01-25T05:11:56.985056318-05:00 AUDIT: id="0f74d01a-62d1-452c-92ee-de563c5d6e80" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:11:56.985567 4678 admission.go:77] getting security context constraints for pod postgresql-helper-1-deploy (generate: ) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{system:serviceaccount:openshift-infra:deploymentconfig-controller ca98311b-e2d9-11e6-a4b0-0e6a5cbf0094 [system:serviceaccounts system:serviceaccounts:openshift-infra system:authenticated] map[]} I0125 05:11:56.985603 4678 admission.go:88] getting security context constraints for pod postgresql-helper-1-deploy (generate: ) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:11:56.986300 4678 audit.go:125] 2017-01-25T05:11:56.986269623-05:00 AUDIT: id="9823ec85-ae54-4c70-8f17-ec70b64cad7e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:11:56.988351 4678 audit.go:45] 2017-01-25T05:11:56.988335912-05:00 AUDIT: id="7de01441-c7d5-4408-bcae-efae30d21f6a" response="200" I0125 05:11:56.988380 4678 audit.go:125] 2017-01-25T05:11:56.988346227-05:00 AUDIT: id="79df5b76-ddf5-452d-a455-a289839303e8" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:11:56.989832 4678 audit.go:125] 2017-01-25T05:11:56.989800615-05:00 AUDIT: id="38404d4a-2fdc-4762-a5b3-fe48d3007f50" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:56.989905 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status: (10.56998ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:11:56.990508 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] 
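The iptables.go:362 entries in this stretch (running iptables -N ..., running iptables -C [OUTPUT -t nat ... -j KUBE-SERVICES]) show the proxier ensuring chains and jump rules exist before the full restore. A rough sketch of that check-then-append idiom follows; it shells out to the system iptables binary with the argument order seen in the log and is not the vendored util/iptables package.

    package main

    import (
        "log"
        "os/exec"
    )

    // ensureRule appends a rule only if `iptables -C` reports it missing.
    // Note the check-then-append is racy without extra locking; this is a sketch.
    func ensureRule(args []string) error {
        if err := exec.Command("iptables", append([]string{"-C"}, args...)...).Run(); err == nil {
            return nil // rule already present
        }
        return exec.Command("iptables", append([]string{"-A"}, args...)...).Run()
    }

    func main() {
        // Same rule spec as the "-C [OUTPUT -t nat ...]" check logged above.
        rule := []string{"OUTPUT", "-t", "nat", "-m", "comment",
            "--comment", "kubernetes service portals", "-j", "KUBE-SERVICES"}
        if err := ensureRule(rule); err != nil {
            log.Fatal(err)
        }
    }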
I0125 05:11:57.008119 4678 audit.go:45] 2017-01-25T05:11:57.008078171-05:00 AUDIT: id="9823ec85-ae54-4c70-8f17-ec70b64cad7e" response="200" I0125 05:11:57.008301 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (22.278137ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:57.010017 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (34.889191ms) I0125 05:11:57.010325 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. Desired pod count change: 0->0 I0125 05:11:57.010384 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 either never recorded expectations, or the ttl expired. I0125 05:11:57.010447 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (78.528µs) I0125 05:11:57.011228 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:11:57.011247 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:11:57.011254 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:11:57.011276 4678 admission.go:149] validating pod postgresql-helper-1-deploy (generate: ) against providers restricted I0125 05:11:57.011373 4678 admission.go:116] pod postgresql-helper-1-deploy (generate: ) validated against provider restricted I0125 05:11:57.012701 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. 
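matcher.go above reports the namespace's preallocated UID range (min 1000640000, max 1000649999) and a supplemental-group block written as 1000640000/10000, i.e. start/size. The small sketch below only converts that start/size notation into the min/max pair printed in the log; the OpenShift annotation plumbing around it is omitted.

    package main

    import (
        "fmt"
        "strconv"
        "strings"
    )

    // parseBlock converts "start/size" (e.g. "1000640000/10000") into the
    // inclusive [min, max] range logged by the SCC matcher: max = start + size - 1.
    func parseBlock(block string) (min, max int64, err error) {
        parts := strings.SplitN(block, "/", 2)
        if len(parts) != 2 {
            return 0, 0, fmt.Errorf("expected start/size, got %q", block)
        }
        start, err := strconv.ParseInt(parts[0], 10, 64)
        if err != nil {
            return 0, 0, err
        }
        size, err := strconv.ParseInt(parts[1], 10, 64)
        if err != nil {
            return 0, 0, err
        }
        return start, start + size - 1, nil
    }

    func main() {
        min, max, _ := parseBlock("1000640000/10000")
        fmt.Println(min, max) // 1000640000 1000649999, matching the log above
    }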
I0125 05:11:57.017466 4678 audit.go:45] 2017-01-25T05:11:57.017443353-05:00 AUDIT: id="79df5b76-ddf5-452d-a455-a289839303e8" response="200" I0125 05:11:57.017711 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (38.887581ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:57.020808 4678 audit.go:45] 2017-01-25T05:11:57.020781137-05:00 AUDIT: id="0f74d01a-62d1-452c-92ee-de563c5d6e80" response="201" I0125 05:11:57.020940 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (40.954038ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:57.023950 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 2) I0125 05:11:57.024893 4678 controller.go:128] Created deployer pod postgresql-helper-1-deploy for deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 I0125 05:11:57.025259 4678 factory.go:488] About to try and schedule pod postgresql-helper-1-deploy I0125 05:11:57.025275 4678 scheduler.go:93] Attempting to schedule pod: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy I0125 05:11:57.025671 4678 replication_controller.go:255] No controllers found for pod postgresql-helper-1-deploy, replication manager will avoid syncing I0125 05:11:57.025734 4678 replica_set.go:288] Pod postgresql-helper-1-deploy created: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-helper-1-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy", UID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"10984", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935917, nsec:11727446, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-helper-1"}, Annotations:map[string]string{"openshift.io/scc":"restricted", "openshift.io/deployment.name":"postgresql-helper-1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc426647bf0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), 
VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-helper-1", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc426647cb0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc431663860), ActiveDeadlineSeconds:(*int64)(0xc431663868), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"", SecurityContext:(*api.PodSecurityContext)(0xc42f8d6bc0), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Pending", Conditions:[]api.PodCondition(nil), Message:"", Reason:"", HostIP:"", PodIP:"", StartTime:(*unversioned.Time)(nil), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus(nil)}}. 
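The pod object dumped above hands the deployer container its inputs entirely through environment variables (OPENSHIFT_DEPLOYMENT_NAME, OPENSHIFT_DEPLOYMENT_NAMESPACE, KUBERNETES_MASTER, BEARER_TOKEN_FILE, OPENSHIFT_CA_DATA) plus the mounted service-account token. The sketch below shows how a deployer-like process might read that contract; it is illustrative only and not the origin-deployer source.

    package main

    import (
        "fmt"
        "log"
        "os"
    )

    func main() {
        name := os.Getenv("OPENSHIFT_DEPLOYMENT_NAME")           // e.g. postgresql-helper-1
        namespace := os.Getenv("OPENSHIFT_DEPLOYMENT_NAMESPACE") // the test project
        master := os.Getenv("KUBERNETES_MASTER")                 // e.g. https://172.18.7.222:8443

        // The bearer token comes from the mounted deployer-token-* secret volume.
        token, err := os.ReadFile(os.Getenv("BEARER_TOKEN_FILE"))
        if err != nil {
            log.Fatalf("reading service account token: %v", err)
        }
        fmt.Printf("would drive deployment %s/%s against %s (token %d bytes)\n",
            namespace, name, master, len(token))
    }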
I0125 05:11:57.026270 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-deploy, ReplicaSet controller will avoid syncing I0125 05:11:57.026362 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-deploy, job controller will avoid syncing I0125 05:11:57.026424 4678 daemoncontroller.go:309] Pod postgresql-helper-1-deploy added. I0125 05:11:57.026473 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-deploy, daemon set controller will avoid syncing I0125 05:11:57.026518 4678 disruption.go:314] addPod called on pod "postgresql-helper-1-deploy" I0125 05:11:57.026535 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:11:57.026546 4678 disruption.go:317] No matching pdb for pod "postgresql-helper-1-deploy" I0125 05:11:57.026709 4678 pet_set.go:160] Pod postgresql-helper-1-deploy created, labels: map[openshift.io/deployer-pod-for.name:postgresql-helper-1] I0125 05:11:57.026767 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-deploy, StatefulSet controller will avoid syncing I0125 05:11:57.027220 4678 factory.go:648] Attempting to bind postgresql-helper-1-deploy to 172.18.7.222 I0125 05:11:57.028390 4678 audit.go:45] 2017-01-25T05:11:57.028365384-05:00 AUDIT: id="38404d4a-2fdc-4762-a5b3-fe48d3007f50" response="201" I0125 05:11:57.028627 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (39.075493ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:57.030240 4678 audit.go:125] 2017-01-25T05:11:57.030174143-05:00 AUDIT: id="6a059d14-1f5d-4c48-b106-1110c46a1b94" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:11:57.030740 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:11:57.032006 4678 audit.go:45] 2017-01-25T05:11:57.031988842-05:00 AUDIT: id="6a059d14-1f5d-4c48-b106-1110c46a1b94" response="409" I0125 05:11:57.032073 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (2.354007ms) 409 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:57.032579 4678 audit.go:125] 2017-01-25T05:11:57.032535404-05:00 AUDIT: id="1f9ff157-2460-41d9-bb1e-7f5da69dfc21" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings" I0125 05:11:57.039339 4678 audit.go:125] 2017-01-25T05:11:57.039292223-05:00 AUDIT: id="3b901b55-53aa-4c4a-9db8-03be133e379d" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:11:57.040264 4678 audit.go:45] 2017-01-25T05:11:57.040251158-05:00 AUDIT: id="3b901b55-53aa-4c4a-9db8-03be133e379d" response="409" I0125 05:11:57.040344 4678 panics.go:76] PUT 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (10.467686ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:57.040701 4678 controller.go:155] Detected existing deployer pod postgresql-helper-1-deploy for deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 I0125 05:11:57.042296 4678 controller.go:294] Cannot update the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper": Operation cannot be fulfilled on deploymentconfigs "postgresql-helper": the object has been modified; please apply your changes to the latest version and try again I0125 05:11:57.042319 4678 controller.go:393] Error syncing deployment config extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper: Operation cannot be fulfilled on deploymentconfigs "postgresql-helper": the object has been modified; please apply your changes to the latest version and try again I0125 05:11:57.043288 4678 audit.go:45] 2017-01-25T05:11:57.043273434-05:00 AUDIT: id="1f9ff157-2460-41d9-bb1e-7f5da69dfc21" response="201" I0125 05:11:57.043416 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings: (11.157621ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:57.045359 4678 config.go:281] Setting pods for source api I0125 05:11:57.045656 4678 replication_controller.go:378] Pod postgresql-helper-1-deploy updated, objectMeta {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10984 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-helper-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10986 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
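The 409 responses above ("Operation cannot be fulfilled ... the object has been modified; please apply your changes to the latest version and try again") are ordinary optimistic-concurrency conflicts: the controller re-reads the object and retries the write. Below is a sketch of that loop using today's client-go retry helper, which postdates this log, so treat it as illustrative rather than what the deploymentconfig-controller literally runs.

    package rcupdate

    import (
        "context"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/util/retry"
    )

    // bumpAnnotation re-fetches the RC and reapplies the change on every 409,
    // the same recovery the controllers above have to perform.
    func bumpAnnotation(ctx context.Context, cs kubernetes.Interface, ns, name, key, value string) error {
        return retry.RetryOnConflict(retry.DefaultRetry, func() error {
            rc, err := cs.CoreV1().ReplicationControllers(ns).Get(ctx, name, metav1.GetOptions{})
            if err != nil {
                return err
            }
            if rc.Annotations == nil {
                rc.Annotations = map[string]string{}
            }
            rc.Annotations[key] = value
            _, err = cs.CoreV1().ReplicationControllers(ns).Update(ctx, rc, metav1.UpdateOptions{})
            return err
        })
    }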
I0125 05:11:57.045851 4678 replication_controller.go:255] No controllers found for pod postgresql-helper-1-deploy, replication manager will avoid syncing I0125 05:11:57.045889 4678 replica_set.go:320] Pod postgresql-helper-1-deploy updated, objectMeta {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10984 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10986 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:11:57.045974 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-deploy, ReplicaSet controller will avoid syncing I0125 05:11:57.046004 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-deploy, job controller will avoid syncing I0125 05:11:57.046021 4678 daemoncontroller.go:332] Pod postgresql-helper-1-deploy updated. 
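Each workload controller above (replication manager, ReplicaSet, job, daemon set, disruption and StatefulSet controllers) inspects the new pod, finds no object whose selector matches labels map[openshift.io/deployer-pod-for.name:postgresql-helper-1], and skips syncing. The core of that decision is a label-selector match; a minimal sketch with the apimachinery labels package, using a made-up controller selector:

    package main

    import (
        "fmt"

        "k8s.io/apimachinery/pkg/labels"
    )

    func main() {
        podLabels := labels.Set{"openshift.io/deployer-pod-for.name": "postgresql-helper-1"}

        // Hypothetical controller selector; the deployer pod matches no
        // controller in the log, so every manager "will avoid syncing".
        rcSelector := labels.SelectorFromSet(labels.Set{"name": "postgresql-helper"})

        fmt.Println(rcSelector.Matches(podLabels)) // false
    }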
I0125 05:11:57.046051 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-deploy, daemon set controller will avoid syncing I0125 05:11:57.046060 4678 config.go:397] Receiving a new pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:57.046161 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-deploy, StatefulSet controller will avoid syncing I0125 05:11:57.046453 4678 audit.go:125] 2017-01-25T05:11:57.046409229-05:00 AUDIT: id="f35d1c4f-10b8-43a6-8bd9-e8b9674554e4" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:11:57.046706 4678 kubelet.go:1781] SyncLoop (ADD, "api"): "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:57.046874 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:57.047683 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:57.046068 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-deploy" I0125 05:11:57.047726 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:11:57.047733 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-deploy" I0125 05:11:57.048504 4678 audit.go:125] 2017-01-25T05:11:57.048470415-05:00 AUDIT: id="bf372631-4ae9-4d32-94a0-aa05ad1c5a21" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy" I0125 05:11:57.049131 4678 audit.go:125] 2017-01-25T05:11:57.049077996-05:00 AUDIT: id="932ed029-c5f1-498e-81e4-c23323ec81d1" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:57.050460 4678 audit.go:45] 2017-01-25T05:11:57.050445911-05:00 AUDIT: id="bf372631-4ae9-4d32-94a0-aa05ad1c5a21" response="200" I0125 05:11:57.050597 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy: (2.422104ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:57.052858 4678 audit.go:125] 2017-01-25T05:11:57.052828038-05:00 AUDIT: id="0ae3b9a9-fb5b-4d11-8e63-fd3adf4b1825" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy/status" I0125 05:11:57.054740 4678 audit.go:45] 2017-01-25T05:11:57.054727271-05:00 AUDIT: id="932ed029-c5f1-498e-81e4-c23323ec81d1" response="201" I0125 05:11:57.054822 4678 panics.go:76] POST 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (5.985293ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:57.056272 4678 audit.go:45] 2017-01-25T05:11:57.05625889-05:00 AUDIT: id="0ae3b9a9-fb5b-4d11-8e63-fd3adf4b1825" response="200" I0125 05:11:57.056337 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy/status: (3.739156ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:57.057148 4678 replication_controller.go:378] Pod postgresql-helper-1-deploy updated, objectMeta {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10986 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10989 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:11:57.057301 4678 replication_controller.go:255] No controllers found for pod postgresql-helper-1-deploy, replication manager will avoid syncing I0125 05:11:57.057323 4678 replica_set.go:320] Pod postgresql-helper-1-deploy updated, objectMeta {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10986 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10989 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-helper-1] OwnerReferences:[] Finalizers:[] ClusterName:}. 
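The audit stream pairs request records (audit.go:125 with id, ip, method, user and uri) with response records (audit.go:45 carrying the same id and the status code). When reading dumps like this one it helps to join the two by id; the sketch below assumes the key="value" layout shown here and is only a reading aid, not part of the server.

    package main

    import (
        "fmt"
        "regexp"
    )

    var (
        reqRe  = regexp.MustCompile(`AUDIT: id="([^"]+)".*?method="([^"]+)".*?uri="([^"]+)"`)
        respRe = regexp.MustCompile(`AUDIT: id="([^"]+)" response="([^"]+)"`)
    )

    // correlate pairs request lines with their response lines by audit id.
    func correlate(lines []string) map[string]string {
        requests := map[string]string{} // id -> "METHOD uri"
        joined := map[string]string{}   // "METHOD uri" -> status code
        for _, l := range lines {
            if m := respRe.FindStringSubmatch(l); m != nil {
                joined[requests[m[1]]] = m[2]
            } else if m := reqRe.FindStringSubmatch(l); m != nil {
                requests[m[1]] = m[2] + " " + m[3]
            }
        }
        return joined
    }

    func main() {
        fmt.Println(correlate([]string{
            `AUDIT: id="x" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="ns" uri="/api/v1/pods/p/status"`,
            `AUDIT: id="x" response="200"`,
        })) // map[PUT /api/v1/pods/p/status:200]
    }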
I0125 05:11:57.057397 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-deploy, ReplicaSet controller will avoid syncing I0125 05:11:57.057421 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-deploy, job controller will avoid syncing I0125 05:11:57.057437 4678 daemoncontroller.go:332] Pod postgresql-helper-1-deploy updated. I0125 05:11:57.057456 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-deploy, daemon set controller will avoid syncing I0125 05:11:57.057469 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-deploy" I0125 05:11:57.057481 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:11:57.057486 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-deploy" I0125 05:11:57.057542 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-deploy, StatefulSet controller will avoid syncing I0125 05:11:57.057758 4678 status_manager.go:425] Status for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Pending Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935917 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935917 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [deployment]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935917 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc4300476a0 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting:0xc430047680 Running: Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID: ContainerID:}]} version:1 podName:postgresql-helper-1-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:11:57.057950 4678 config.go:281] Setting pods for source api I0125 05:11:57.058890 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:57.059108 4678 audit.go:45] 2017-01-25T05:11:57.059095773-05:00 AUDIT: id="f35d1c4f-10b8-43a6-8bd9-e8b9674554e4" response="200" I0125 05:11:57.059353 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (18.055524ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:57.059626 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 status from New to Pending (scale: 0) I0125 05:11:57.059680 4678 controller.go:155] Detected existing deployer pod postgresql-helper-1-deploy for deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 I0125 05:11:57.062150 4678 audit.go:125] 2017-01-25T05:11:57.062114141-05:00 AUDIT: id="512234a1-b5f2-4236-848d-4857ecf9c93f" ip="172.18.7.222" method="PUT" 
user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:11:57.066360 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. I0125 05:11:57.067354 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. Desired pod count change: 0->0 I0125 05:11:57.067393 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 either never recorded expectations, or the ttl expired. I0125 05:11:57.067423 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (39.308µs) I0125 05:11:57.068264 4678 store.go:283] GuaranteedUpdate of kubernetes.io/controllers/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 failed because of a conflict, going to retry I0125 05:11:57.068507 4678 audit.go:45] 2017-01-25T05:11:57.068491889-05:00 AUDIT: id="512234a1-b5f2-4236-848d-4857ecf9c93f" response="409" I0125 05:11:57.068569 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (8.318371ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:11:57.069303 4678 audit.go:125] 2017-01-25T05:11:57.069263885-05:00 AUDIT: id="01f7a7e5-18b5-43c5-a94f-9d13aabded3a" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:11:57.072233 4678 audit.go:45] 2017-01-25T05:11:57.072217994-05:00 AUDIT: id="01f7a7e5-18b5-43c5-a94f-9d13aabded3a" response="200" I0125 05:11:57.072367 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (3.382585ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:11:57.072765 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 2) I0125 05:11:57.073530 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:11:57.075290 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:57.075986 4678 audit.go:125] 2017-01-25T05:11:57.075946745-05:00 AUDIT: id="386b2593-4c7a-4b4d-b84d-fac5f0e323cd" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:11:57.077218 4678 audit.go:45] 2017-01-25T05:11:57.077188748-05:00 AUDIT: id="386b2593-4c7a-4b4d-b84d-fac5f0e323cd" response="200" I0125 05:11:57.077445 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (1.775881ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 
172.18.7.222:50864] I0125 05:11:57.079053 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:57.096641 4678 docker_manager.go:1947] Need to restart pod infra container for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" because it is not found I0125 05:11:57.096733 4678 docker_manager.go:1992] Container {Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-master-1 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc4335a20c0 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it. 
I0125 05:11:57.096781 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:true InfraChanged:false InfraContainerId: InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[0:Container {Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-master-1 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc4335a20c0 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it.] 
ContainersToKeep:map[]} I0125 05:11:57.096837 4678 docker_manager.go:2095] Killing Infra Container for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)", will start new one I0125 05:11:57.096867 4678 docker_manager.go:2153] Creating pod infra container for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:57.106127 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy/POD podIP: "" creating hosts mount: false I0125 05:11:57.111630 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:11:57.129169 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy/POD: setting entrypoint "[]" and command "[]" I0125 05:11:57.129682 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/secret/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:11:57.144572 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:11:57.177566 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:11:57.210329 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:11:57.223942 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy/POD: setting entrypoint "[]" and command "[]" I0125 05:11:57.230729 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/secret/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") to pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:11:57.230827 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:11:57.231316 4678 empty_dir.go:248] pod b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094: mounting tmpfs for volume wrapped_deployer-token-r7jj8 I0125 05:11:57.231332 4678 mount_linux.go:112] Mounting tmpfs /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 tmpfs [] with command: "mount" I0125 05:11:57.231372 4678 mount_linux.go:115] Mounting cmd (mount) with arguments ([-t tmpfs tmpfs /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8]) I0125 05:11:57.255095 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent 
--name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:11:57.255142 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:11:57.283062 4678 audit.go:125] 2017-01-25T05:11:57.283008096-05:00 AUDIT: id="099da602-fc7f-4e1a-9aad-fa26bcc48816" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:11:57.284481 4678 audit.go:45] 2017-01-25T05:11:57.284465195-05:00 AUDIT: id="099da602-fc7f-4e1a-9aad-fa26bcc48816" response="200" I0125 05:11:57.284644 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (1.95318ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:57.284942 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:11:57.285057 4678 atomic_writer.go:145] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy volume deployer-token-r7jj8: write required for target directory /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:11:57.285537 4678 atomic_writer.go:160] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy volume deployer-token-r7jj8: performed write of new data to ts data directory: /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8/..1981_25_01_05_11_57.916138872 I0125 05:11:57.285877 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:11:57.289681 4678 proxier.go:797] syncProxyRules took 412.973201ms I0125 05:11:57.289707 4678 proxier.go:431] OnServiceUpdate took 413.149178ms for 4 services I0125 05:11:57.289826 4678 proxier.go:804] Syncing iptables rules I0125 05:11:57.289837 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:11:57.301621 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:11:57.301727 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:11:57.301814 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:11:57.301825 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:11:57.301833 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:11:57.301840 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:11:57.301847 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:11:57.312694 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:11:57.334940 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:57.350433 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:57.352927 4678 audit.go:125] 2017-01-25T05:11:57.352865485-05:00 AUDIT: id="75f271f4-dbd1-48bb-974d-ec9c7582c920" ip="172.18.7.222" method="GET" 
user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:11:57.354686 4678 audit.go:45] 2017-01-25T05:11:57.354669577-05:00 AUDIT: id="75f271f4-dbd1-48bb-974d-ec9c7582c920" response="200" I0125 05:11:57.354931 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (2.370011ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:57.355293 4678 docker_manager.go:1947] Need to restart pod infra container for "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" because it is not found I0125 05:11:57.355365 4678 docker_manager.go:1992] Container {Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-helper-1 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc42c5a9740 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it. 
I0125 05:11:57.355394 4678 docker_manager.go:2086] Got container changes for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:true InfraChanged:false InfraContainerId: InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[0:Container {Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-helper-1 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc42c5a9740 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it.] 
ContainersToKeep:map[]} I0125 05:11:57.355451 4678 docker_manager.go:2095] Killing Infra Container for "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)", will start new one I0125 05:11:57.355477 4678 docker_manager.go:2153] Creating pod infra container for "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:11:57.359854 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:57.379110 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:57.399876 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:11:57.420086 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:11:57.440274 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy/POD podIP: "" creating hosts mount: false I0125 05:11:57.443598 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:11:57.477374 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:11:57.509549 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m 
comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment 
"kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:11:57.509588 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:11:57.555815 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy/POD: setting entrypoint "[]" and command "[]" I0125 05:11:57.562214 4678 proxier.go:797] syncProxyRules took 272.382684ms I0125 05:11:57.562256 4678 proxier.go:566] OnEndpointsUpdate took 1.30558351s for 4 endpoints I0125 05:11:57.562308 4678 proxier.go:381] Received update notice: [] I0125 05:11:57.562356 4678 proxier.go:804] Syncing iptables rules I0125 05:11:57.562369 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:11:57.592377 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:11:57.613985 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:57.633854 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:57.653379 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:57.666547 4678 panics.go:76] GET /api/v1/watch/persistentvolumeclaims?resourceVersion=8550&timeoutSeconds=470: (7m50.002189961s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:57.671396 4678 reflector.go:392] pkg/controller/informers/factory.go:89: Watch close - *api.PersistentVolumeClaim total 3 items received I0125 05:11:57.672368 4678 audit.go:125] 2017-01-25T05:11:57.672320358-05:00 AUDIT: id="237b1682-0b50-436c-a964-67af7ba66403" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/persistentvolumeclaims?resourceVersion=10942&timeoutSeconds=301" I0125 05:11:57.673087 4678 audit.go:45] 2017-01-25T05:11:57.673072454-05:00 AUDIT: id="237b1682-0b50-436c-a964-67af7ba66403" response="200" I0125 05:11:57.677116 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:11:57.686820 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:11:57.699054 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:11:57.722678 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:11:57.745855 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:11:57.767083 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A 
KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment 
default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:11:57.767120 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:11:57.788189 4678 proxier.go:797] syncProxyRules took 225.825057ms I0125 05:11:57.788247 4678 proxier.go:431] OnServiceUpdate took 225.922842ms for 4 services I0125 05:11:57.788283 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:11:57.788422 4678 proxier.go:804] Syncing iptables rules I0125 05:11:57.788434 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:11:57.798067 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:11:57.798176 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:11:57.798193 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:11:57.798217 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:11:57.798225 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:11:57.798256 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:11:57.798266 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:11:57.810526 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:11:57.836819 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:57.869575 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:57.909488 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:57.944921 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 
05:11:57.971521 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:11:58.001535 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:11:58.032370 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:11:58.074561 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A 
KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:11:58.074600 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:11:58.103812 4678 proxier.go:797] syncProxyRules took 315.385566ms I0125 05:11:58.103844 4678 proxier.go:566] OnEndpointsUpdate took 315.484059ms for 6 endpoints I0125 05:11:58.103903 4678 proxier.go:381] Received update notice: [] I0125 05:11:58.103940 4678 proxier.go:804] Syncing iptables rules I0125 05:11:58.103949 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:11:58.122792 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:11:58.141178 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:58.158713 4678 audit.go:125] 2017-01-25T05:11:58.15867145-05:00 AUDIT: id="1fabc3a6-4aae-48fd-8ff8-90474789ace8" ip="172.18.7.222" method="GET" 
user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:11:58.159334 4678 audit.go:45] 2017-01-25T05:11:58.159320519-05:00 AUDIT: id="1fabc3a6-4aae-48fd-8ff8-90474789ace8" response="200" I0125 05:11:58.159710 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.285298ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:11:58.160157 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:11:58.162265 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:58.181339 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:11:58.201927 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:11:58.226180 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:11:58.258673 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:11:58.279676 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:11:58.301738 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j 
KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type 
LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:11:58.301785 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:11:58.330801 4678 proxier.go:797] syncProxyRules took 226.841312ms I0125 05:11:58.330839 4678 proxier.go:431] OnServiceUpdate took 226.92166ms for 4 services I0125 05:11:58.424681 4678 generic.go:145] GenericPLEG: b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f: non-existent -> running I0125 05:11:58.425273 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f.scope" E0125 05:11:58.431379 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" container "POD": symlink /var/log/containers/postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_POD-764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f.log: no such file or directory I0125 05:11:58.431715 4678 docker_manager.go:1833] DNS ResolvConfPath exists: /var/lib/docker/containers/764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f/resolv.conf. Will attempt to add ndots option: options ndots:5 I0125 05:11:58.431787 4678 docker_manager.go:2167] Calling network plugin kubernetes.io/no-op to setup pod for postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094) E0125 05:11:58.696656 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" container "POD": symlink /var/log/containers/postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_POD-b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a.log: no such file or directory I0125 05:11:58.696733 4678 docker_manager.go:1833] DNS ResolvConfPath exists: /var/lib/docker/containers/b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a/resolv.conf. 
Will attempt to add ndots option: options ndots:5 I0125 05:11:58.696833 4678 docker_manager.go:2167] Calling network plugin kubernetes.io/no-op to setup pod for postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:11:58.716119 4678 hairpin.go:110] Enabling hairpin on interface veth991c31b I0125 05:11:58.716410 4678 docker_manager.go:2208] Determined pod ip after infra change: "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)": "172.17.0.2" I0125 05:11:58.716439 4678 docker_manager.go:2293] Creating container &{Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-slave-1 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc42c5c8630 Stdin:false StdinOnce:false TTY:false} in pod postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:11:58.757948 4678 manager.go:898] Added container: "/system.slice/docker-764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f.scope" (aliases: [k8s_POD.f321dce3_postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094_882bf590 764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f], namespace: "docker") I0125 05:11:58.758129 4678 handler.go:325] Added event &{/system.slice/docker-764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f.scope 2017-01-25 05:11:58.06986676 -0500 EST containerCreation {}} I0125 05:11:58.758208 4678 factory.go:104] Error trying to work out if we can handle 
/system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b3924f08\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-deployer\x2dtoken\x2dr7jj8.mount: invalid container name I0125 05:11:58.758218 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b3924f08\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:11:58.758238 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b3924f08\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount", but ignoring. I0125 05:11:58.758249 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b3924f08\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:11:58.758290 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b39c8e4d\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-deployer\x2dtoken\x2dr7jj8.mount: invalid container name I0125 05:11:58.758295 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b39c8e4d\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:11:58.758306 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b39c8e4d\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount", but ignoring. I0125 05:11:58.758319 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b39c8e4d\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:11:58.758356 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b3e8deb7\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-deployer\x2dtoken\x2dr7jj8.mount: invalid container name I0125 05:11:58.758360 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b3e8deb7\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:11:58.758376 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b3e8deb7\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount", but ignoring. 
I0125 05:11:58.758392 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b3e8deb7\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:11:58.758837 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy/deployment podIP: "172.17.0.2" creating hosts mount: true I0125 05:11:58.759816 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc425734b00 Mounts:[] Config:0xc42ef79d40 NetworkSettings:0xc433401400} I0125 05:11:58.759933 4678 container.go:407] Start housekeeping for container "/system.slice/docker-764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f.scope" I0125 05:11:58.794926 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a.scope" I0125 05:11:58.795647 4678 generic.go:342] PLEG: Write status for postgresql-slave-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-slave-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.2", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42c775960)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:11:58.795769 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f"} I0125 05:11:58.796436 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1-deploy", UID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10970", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Pulled' Container image "openshift/origin-deployer:86a9783" already present on machine I0125 05:11:58.797296 4678 audit.go:125] 2017-01-25T05:11:58.797256693-05:00 AUDIT: id="340a9b00-2735-4f13-a0b5-60136238e7d5" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:58.801725 4678 audit.go:45] 2017-01-25T05:11:58.801702572-05:00 AUDIT: id="340a9b00-2735-4f13-a0b5-60136238e7d5" response="201" I0125 05:11:58.801850 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (4.9347ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:58.832674 4678 hairpin.go:110] Enabling hairpin on interface veth021504f I0125 05:11:58.832788 4678 docker_manager.go:2208] Determined pod ip after infra change: "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)": "172.17.0.4" I0125 05:11:58.832815 4678 docker_manager.go:2293] Creating container &{Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE 
Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-master-1 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc4335a20c0 Stdin:false StdinOnce:false TTY:false} in pod postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:11:58.852769 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy/deployment podIP: "172.17.0.4" creating hosts mount: true I0125 05:11:58.853543 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1-deploy", UID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10959", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Pulled' Container image "openshift/origin-deployer:86a9783" already present on machine I0125 05:11:58.854421 4678 audit.go:125] 2017-01-25T05:11:58.854379498-05:00 AUDIT: id="091b58b1-26b5-4b17-9401-889662232009" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:58.857057 4678 audit.go:45] 2017-01-25T05:11:58.857042512-05:00 AUDIT: id="091b58b1-26b5-4b17-9401-889662232009" response="201" I0125 05:11:58.857118 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.04443ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:58.985033 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy/deployment: setting entrypoint "[]" and command "[]" I0125 05:11:59.084567 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy/deployment: setting entrypoint "[]" and command "[]" I0125 
05:11:59.136595 4678 audit.go:125] 2017-01-25T05:11:59.136550668-05:00 AUDIT: id="d8318abd-177d-45f7-9221-47e0a5551954" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:11:59.137663 4678 audit.go:45] 2017-01-25T05:11:59.137647478-05:00 AUDIT: id="d8318abd-177d-45f7-9221-47e0a5551954" response="200" I0125 05:11:59.137758 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (3.683545ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:11:59.138069 4678 controller.go:106] Found 0 cronjobs I0125 05:11:59.140157 4678 audit.go:125] 2017-01-25T05:11:59.140129077-05:00 AUDIT: id="739739b8-0471-4df3-a167-9c6f7ef3a10b" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:11:59.141164 4678 audit.go:45] 2017-01-25T05:11:59.141151127-05:00 AUDIT: id="739739b8-0471-4df3-a167-9c6f7ef3a10b" response="200" I0125 05:11:59.141245 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.891678ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:11:59.141459 4678 controller.go:114] Found 0 jobs I0125 05:11:59.141469 4678 controller.go:117] Found 0 groups I0125 05:11:59.260989 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:11:59.261018 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:11:59.261626 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:11:59.261639 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:11:59.285992 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42a645de0 -1 [] true false map[] 0xc429387c20 } I0125 05:11:59.286038 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:11:59.286118 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42a645f20 -1 [] true false map[] 0xc42ec76c30 } I0125 05:11:59.286138 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded E0125 05:11:59.385117 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" container "POD": symlink /var/log/containers/postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_POD-c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b.log: no such file or directory I0125 05:11:59.385173 4678 docker_manager.go:1833] DNS ResolvConfPath exists: /var/lib/docker/containers/c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b/resolv.conf. 
Will attempt to add ndots option: options ndots:5 I0125 05:11:59.385250 4678 docker_manager.go:2167] Calling network plugin kubernetes.io/no-op to setup pod for postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:11:59.386045 4678 manager.go:898] Added container: "/system.slice/docker-b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a.scope" (aliases: [k8s_POD.f321dce3_postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094_6c0f26e5 b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a], namespace: "docker") I0125 05:11:59.386244 4678 handler.go:325] Added event &{/system.slice/docker-b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a.scope 2017-01-25 05:11:58.486867512 -0500 EST containerCreation {}} I0125 05:11:59.386336 4678 container.go:407] Start housekeeping for container "/system.slice/docker-b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a.scope" I0125 05:11:59.566385 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b.scope" I0125 05:11:59.581978 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1-deploy", UID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10970", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Created' Created container with docker id 269d8959d1b2; Security:[seccomp=unconfined] I0125 05:11:59.582706 4678 audit.go:125] 2017-01-25T05:11:59.582658357-05:00 AUDIT: id="3f2a4c9d-64f0-4b24-88eb-92901ca982d4" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:59.586599 4678 audit.go:45] 2017-01-25T05:11:59.586575893-05:00 AUDIT: id="3f2a4c9d-64f0-4b24-88eb-92901ca982d4" response="201" I0125 05:11:59.586679 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (4.311428ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:11:59.684623 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:11:59.730165 4678 namespace_controller.go:206] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (749ns) I0125 05:11:59.854184 4678 hairpin.go:110] Enabling hairpin on interface veth66577c4 I0125 05:11:59.854332 4678 docker_manager.go:2208] Determined pod ip after infra change: "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)": "172.17.0.5" I0125 05:11:59.854360 4678 docker_manager.go:2293] Creating container &{Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw 
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-helper-1 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc42c5a9740 Stdin:false StdinOnce:false TTY:false} in pod postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:11:59.918870 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy/deployment podIP: "172.17.0.5" creating hosts mount: true I0125 05:11:59.919129 4678 manager.go:898] Added container: "/system.slice/docker-c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b.scope" (aliases: [k8s_POD.f321dce3_postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094_ae1dbcf3 c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b], namespace: "docker") I0125 05:11:59.919419 4678 handler.go:325] Added event &{/system.slice/docker-c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b.scope 2017-01-25 05:11:59.047868524 -0500 EST containerCreation {}} I0125 05:11:59.919556 4678 container.go:407] Start housekeeping for container "/system.slice/docker-c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b.scope" I0125 05:11:59.919877 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-helper-1-deploy", UID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10986", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Pulled' Container image "openshift/origin-deployer:86a9783" already present on machine I0125 05:11:59.920580 4678 audit.go:125] 2017-01-25T05:11:59.920540033-05:00 AUDIT: id="7c3a8566-7d4b-4b89-8d4b-a3d4d7b72cfd" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:11:59.946319 4678 audit.go:45] 2017-01-25T05:11:59.946294641-05:00 AUDIT: id="7c3a8566-7d4b-4b89-8d4b-a3d4d7b72cfd" response="201" I0125 05:11:59.946464 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (26.262122ms) 201 
[[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:00.017309 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy/deployment: setting entrypoint "[]" and command "[]" I0125 05:12:00.120560 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1-deploy", UID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10959", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Created' Created container with docker id a14e6dfd9e5b; Security:[seccomp=unconfined] I0125 05:12:00.121261 4678 audit.go:125] 2017-01-25T05:12:00.121217024-05:00 AUDIT: id="86d70055-2ac3-4260-9334-6666779b6b2c" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:00.126031 4678 audit.go:45] 2017-01-25T05:12:00.126009443-05:00 AUDIT: id="86d70055-2ac3-4260-9334-6666779b6b2c" response="201" I0125 05:12:00.126106 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (5.189883ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:00.169852 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1-deploy", UID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10970", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Started' Started container with docker id 269d8959d1b2 I0125 05:12:00.170460 4678 audit.go:125] 2017-01-25T05:12:00.170415236-05:00 AUDIT: id="6512a9dc-b9b9-424b-94c0-4a3394ca6fdf" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:00.173642 4678 generic.go:145] GenericPLEG: b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b: non-existent -> running I0125 05:12:00.173665 4678 generic.go:145] GenericPLEG: b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a: non-existent -> running I0125 05:12:00.173687 4678 generic.go:145] GenericPLEG: b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d: non-existent -> running I0125 05:12:00.175054 4678 audit.go:45] 2017-01-25T05:12:00.175036776-05:00 AUDIT: id="6512a9dc-b9b9-424b-94c0-4a3394ca6fdf" response="201" I0125 05:12:00.175122 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (4.97011ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:00.213105 4678 namespace_controller.go:206] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (579ns) I0125 05:12:00.292458 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d.scope" I0125 05:12:00.463145 4678 namespace_controller.go:206] Finished syncing namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (630ns) E0125 05:12:00.532605 4678 
docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment": symlink /var/log/containers/postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_deployment-269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d.log: no such file or directory I0125 05:12:00.602747 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1-deploy", UID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10959", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Started' Started container with docker id a14e6dfd9e5b I0125 05:12:00.603892 4678 audit.go:125] 2017-01-25T05:12:00.603848617-05:00 AUDIT: id="97defbbf-e012-4f1d-990c-f7caeccac79f" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:00.606670 4678 audit.go:45] 2017-01-25T05:12:00.60665514-05:00 AUDIT: id="97defbbf-e012-4f1d-990c-f7caeccac79f" response="201" I0125 05:12:00.606726 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.172392ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] E0125 05:12:00.724852 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment": symlink /var/log/containers/postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_deployment-a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f.log: no such file or directory I0125 05:12:00.725710 4678 manager.go:898] Added container: "/system.slice/docker-269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d.scope" (aliases: [k8s_deployment.46f5d329_postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094_2f5e0764 269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d], namespace: "docker") I0125 05:12:00.725950 4678 handler.go:325] Added event &{/system.slice/docker-269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d.scope 2017-01-25 05:11:59.907870076 -0500 EST containerCreation {}} I0125 05:12:00.726078 4678 container.go:407] Start housekeeping for container "/system.slice/docker-269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d.scope" I0125 05:12:00.747922 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f.scope" I0125 05:12:00.876032 4678 audit.go:125] 2017-01-25T05:12:00.875981433-05:00 AUDIT: id="fbe76c6b-fb79-4509-8415-f1e6e6efba98" ip="172.17.0.2" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:12:00.877320 4678 audit.go:45] 2017-01-25T05:12:00.877304033-05:00 AUDIT: id="fbe76c6b-fb79-4509-8415-f1e6e6efba98" response="200" I0125 05:12:00.877794 
4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (4.364828ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.2:37586] I0125 05:12:00.885750 4678 audit.go:125] 2017-01-25T05:12:00.885688847-05:00 AUDIT: id="8564fea3-452f-40a8-9535-ee8c1072b6d9" ip="172.17.0.2" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-slave" I0125 05:12:00.887681 4678 audit.go:45] 2017-01-25T05:12:00.88765838-05:00 AUDIT: id="8564fea3-452f-40a8-9535-ee8c1072b6d9" response="200" I0125 05:12:00.888137 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-slave: (4.879109ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.2:37586] I0125 05:12:00.898373 4678 audit.go:125] 2017-01-25T05:12:00.898317463-05:00 AUDIT: id="caa92c8e-d0a9-4d7a-abbe-589bd10d33e7" ip="172.17.0.2" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:12:00.899551 4678 audit.go:45] 2017-01-25T05:12:00.899534161-05:00 AUDIT: id="caa92c8e-d0a9-4d7a-abbe-589bd10d33e7" response="200" I0125 05:12:00.900001 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (7.602844ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.2:37586] I0125 05:12:00.906440 4678 audit.go:125] 2017-01-25T05:12:00.906395914-05:00 AUDIT: id="9b1368d5-bdb4-4ea4-813b-63705bd0d9d3" ip="172.17.0.2" method="PUT" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:12:00.910555 4678 audit.go:45] 2017-01-25T05:12:00.910537631-05:00 AUDIT: id="9b1368d5-bdb4-4ea4-813b-63705bd0d9d3" response="200" I0125 05:12:00.911285 4678 replication_controller.go:322] Observed updated replication controller postgresql-slave-1. Desired pod count change: 0->1 I0125 05:12:00.911351 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 either never recorded expectations, or the ttl expired. 
I0125 05:12:00.911385 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (7.220787ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.2:37586] I0125 05:12:00.911383 4678 controller_utils.go:175] Setting expectations &controller.ControlleeExpectations{add:1, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1", timestamp:time.Time{sec:63620935920, nsec:911379537, loc:(*time.Location)(0xa2479e0)}} I0125 05:12:00.911427 4678 replication_controller.go:541] Too few "extended-test-postgresql-replication-1-34bbd-xd4g8"/"postgresql-slave-1" replicas, need 1, creating 1 I0125 05:12:00.911687 4678 factory.go:154] Replication controller "postgresql-slave-1" updated. I0125 05:12:00.914666 4678 audit.go:125] 2017-01-25T05:12:00.9146221-05:00 AUDIT: id="0b901aba-3888-484c-a80a-ba76a48a496a" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:12:00.915115 4678 audit.go:125] 2017-01-25T05:12:00.915076785-05:00 AUDIT: id="84ee58b1-3878-429c-9f87-1e4d785fbad2" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:12:00.915820 4678 audit.go:125] 2017-01-25T05:12:00.915789221-05:00 AUDIT: id="f431d61f-a25e-4217-af8b-bb96771a2702" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/images/sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389" I0125 05:12:00.917576 4678 audit.go:45] 2017-01-25T05:12:00.917562568-05:00 AUDIT: id="f431d61f-a25e-4217-af8b-bb96771a2702" response="200" I0125 05:12:00.917914 4678 audit.go:45] 2017-01-25T05:12:00.917898083-05:00 AUDIT: id="84ee58b1-3878-429c-9f87-1e4d785fbad2" response="200" I0125 05:12:00.918324 4678 panics.go:76] GET /oapi/v1/images/sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389: (2.762988ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:00.918955 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (4.130464ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:00.919350 4678 admission.go:77] getting security context constraints for pod (generate: postgresql-slave-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{system:serviceaccount:openshift-infra:replication-controller cab49cde-e2d9-11e6-a4b0-0e6a5cbf0094 [system:serviceaccounts system:serviceaccounts:openshift-infra system:authenticated] map[]} I0125 05:12:00.919386 4678 admission.go:88] getting security context constraints for pod (generate: postgresql-slave-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:00.919530 4678 factory.go:154] Replication controller "postgresql-slave-1" updated. 
I0125 05:12:00.919697 4678 replication_controller.go:322] Observed updated replication controller postgresql-slave-1. Desired pod count change: 1->0 I0125 05:12:00.920151 4678 audit.go:125] 2017-01-25T05:12:00.920113199-05:00 AUDIT: id="b3607752-370a-48f0-b967-f5a9afe133c6" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:00.920595 4678 audit.go:125] 2017-01-25T05:12:00.920561839-05:00 AUDIT: id="8870bfa5-b8c1-4c93-a948-3df41a97a02b" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:00.921136 4678 audit.go:45] 2017-01-25T05:12:00.921122649-05:00 AUDIT: id="b3607752-370a-48f0-b967-f5a9afe133c6" response="200" I0125 05:12:00.921282 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.319407ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:00.921564 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:00.921575 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:00.921582 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:00.921603 4678 admission.go:149] validating pod (generate: postgresql-slave-1-) against providers restricted I0125 05:12:00.921662 4678 admission.go:116] pod (generate: postgresql-slave-1-) validated against provider restricted I0125 05:12:00.923010 4678 audit.go:45] 2017-01-25T05:12:00.922995939-05:00 AUDIT: id="8870bfa5-b8c1-4c93-a948-3df41a97a02b" response="201" I0125 05:12:00.923060 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (2.724985ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:00.923064 4678 audit.go:45] 2017-01-25T05:12:00.923052974-05:00 AUDIT: id="0b901aba-3888-484c-a80a-ba76a48a496a" response="201" I0125 05:12:00.923133 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (10.604043ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:00.923517 4678 controller_utils.go:512] Controller postgresql-slave-1 created pod postgresql-slave-1-qt1rc I0125 05:12:00.923571 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1, replicas 0->0 (need 1), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 1->2 I0125 05:12:00.924002 4678 factory.go:488] About to try and schedule pod postgresql-slave-1-qt1rc I0125 05:12:00.924014 4678 scheduler.go:93] Attempting to schedule pod: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc I0125 05:12:00.924082 4678 event.go:217] Event(api.ObjectReference{Kind:"ReplicationController", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", 
Name:"postgresql-slave-1", UID:"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10998", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: postgresql-slave-1-qt1rc I0125 05:12:00.924216 4678 controller_utils.go:192] Lowered expectations &controller.ControlleeExpectations{add:0, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1", timestamp:time.Time{sec:63620935920, nsec:911379537, loc:(*time.Location)(0xa2479e0)}} I0125 05:12:00.924257 4678 replica_set.go:288] Pod postgresql-slave-1-qt1rc created: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-slave-1-qt1rc", GenerateName:"postgresql-slave-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11001", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935920, nsec:921802332, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"name":"postgresql-slave", "app":"pg-replica-example", "deployment":"postgresql-slave-1", "deploymentconfig":"postgresql-slave"}, Annotations:map[string]string{"openshift.io/deployment.name":"postgresql-slave-1", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1", "openshift.io/deployment-config.name":"postgresql-slave", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-slave-1\",\"uid\":\"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"10998\"}}\n"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc433162060), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), 
Secret:(*api.SecretVolumeSource)(0xc42a4fdb00), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-slave", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-slave"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_SERVICE_NAME", Value:"postgresql-master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc42a4fdb90), ReadinessProbe:(*api.Probe)(0xc42a4fdbc0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42a4fdbf0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc433162180), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"", SecurityContext:(*api.PodSecurityContext)(0xc42d9323c0), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Pending", Conditions:[]api.PodCondition(nil), Message:"", Reason:"", HostIP:"", PodIP:"", StartTime:(*unversioned.Time)(nil), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus(nil)}}. I0125 05:12:00.924538 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-qt1rc, ReplicaSet controller will avoid syncing I0125 05:12:00.924576 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-qt1rc, job controller will avoid syncing I0125 05:12:00.924594 4678 daemoncontroller.go:309] Pod postgresql-slave-1-qt1rc added. 
I0125 05:12:00.924625 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-qt1rc, daemon set controller will avoid syncing I0125 05:12:00.924639 4678 disruption.go:314] addPod called on pod "postgresql-slave-1-qt1rc" I0125 05:12:00.924658 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-qt1rc, PodDisruptionBudget controller will avoid syncing. I0125 05:12:00.924664 4678 disruption.go:317] No matching pdb for pod "postgresql-slave-1-qt1rc" I0125 05:12:00.924916 4678 pet_set.go:160] Pod postgresql-slave-1-qt1rc created, labels: map[name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave] I0125 05:12:00.924944 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-qt1rc, StatefulSet controller will avoid syncing I0125 05:12:00.925141 4678 factory.go:648] Attempting to bind postgresql-slave-1-qt1rc to 172.18.7.222 I0125 05:12:00.925800 4678 audit.go:125] 2017-01-25T05:12:00.925764811-05:00 AUDIT: id="6423bb60-83f5-41e7-86e8-e0260f408396" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings" I0125 05:12:00.927906 4678 audit.go:45] 2017-01-25T05:12:00.927893215-05:00 AUDIT: id="6423bb60-83f5-41e7-86e8-e0260f408396" response="201" I0125 05:12:00.927956 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings: (2.42339ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:00.930050 4678 config.go:281] Setting pods for source api I0125 05:12:00.931151 4678 config.go:397] Receiving a new pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:00.931230 4678 kubelet.go:1781] SyncLoop (ADD, "api"): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:00.931632 4678 kubelet_pods.go:1029] Generating status for "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:00.932265 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:00.934463 4678 replication_controller.go:378] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11001 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave] Annotations:map[openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11002 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1] Annotations:map[openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:00.934614 4678 replica_set.go:320] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11001 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave] Annotations:map[openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11002 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave] Annotations:map[openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:00.934714 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-qt1rc, ReplicaSet controller will avoid syncing I0125 05:12:00.934743 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-qt1rc, job controller will avoid syncing I0125 05:12:00.934765 4678 daemoncontroller.go:332] Pod postgresql-slave-1-qt1rc updated. I0125 05:12:00.934794 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-qt1rc, daemon set controller will avoid syncing I0125 05:12:00.934814 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-qt1rc" I0125 05:12:00.934830 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-qt1rc, PodDisruptionBudget controller will avoid syncing. I0125 05:12:00.934835 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-qt1rc" I0125 05:12:00.934915 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-qt1rc, StatefulSet controller will avoid syncing I0125 05:12:00.936089 4678 audit.go:125] 2017-01-25T05:12:00.93604959-05:00 AUDIT: id="b3991a7d-6228-44c9-bb72-3ab2a97632e7" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:00.936975 4678 audit.go:125] 2017-01-25T05:12:00.936953454-05:00 AUDIT: id="287436e1-abd3-4714-b135-3ee8b74f0a04" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:00.937956 4678 audit.go:125] 2017-01-25T05:12:00.93793096-05:00 AUDIT: id="c74f2ba1-3c98-4b6e-9f02-f6bb249d516a" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:00.939588 4678 audit.go:125] 2017-01-25T05:12:00.939552022-05:00 AUDIT: id="e8606993-68b4-4523-b644-a3132cf777ae" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status" I0125 05:12:00.939862 4678 audit.go:125] 2017-01-25T05:12:00.939829633-05:00 AUDIT: id="f0bc29f6-5c0b-43d6-9e17-25ecbbf1a208" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:00.940188 4678 audit.go:125] 2017-01-25T05:12:00.940151795-05:00 AUDIT: id="ddbfa174-fcd4-4926-b787-a07b6a8484c0" ip="172.17.0.2" method="GET" 
user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:12:00.940544 4678 audit.go:45] 2017-01-25T05:12:00.940530562-05:00 AUDIT: id="e8606993-68b4-4523-b644-a3132cf777ae" response="409" I0125 05:12:00.940598 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status: (16.123949ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:00.940914 4678 audit.go:45] 2017-01-25T05:12:00.940899952-05:00 AUDIT: id="c74f2ba1-3c98-4b6e-9f02-f6bb249d516a" response="200" I0125 05:12:00.940995 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (3.236331ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:00.941774 4678 audit.go:45] 2017-01-25T05:12:00.941761263-05:00 AUDIT: id="f0bc29f6-5c0b-43d6-9e17-25ecbbf1a208" response="200" I0125 05:12:00.941832 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (16.044918ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:00.943126 4678 audit.go:45] 2017-01-25T05:12:00.943111782-05:00 AUDIT: id="287436e1-abd3-4714-b135-3ee8b74f0a04" response="201" I0125 05:12:00.943177 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (6.435544ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:00.943282 4678 audit.go:125] 2017-01-25T05:12:00.943251589-05:00 AUDIT: id="c0953618-58f0-4b42-87d3-c0b21a78d9a0" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/status" I0125 05:12:00.943503 4678 audit.go:45] 2017-01-25T05:12:00.943490049-05:00 AUDIT: id="ddbfa174-fcd4-4926-b787-a07b6a8484c0" response="200" I0125 05:12:00.943899 4678 audit.go:45] 2017-01-25T05:12:00.943885787-05:00 AUDIT: id="b3991a7d-6228-44c9-bb72-3ab2a97632e7" response="201" I0125 05:12:00.943951 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (18.98545ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:00.944239 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:12:00.944317 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (14.821049ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.2:37586] I0125 05:12:00.945850 4678 audit.go:45] 2017-01-25T05:12:00.945832099-05:00 AUDIT: id="c0953618-58f0-4b42-87d3-c0b21a78d9a0" response="200" I0125 05:12:00.945932 4678 panics.go:76] PUT 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/status: (2.93467ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:00.946235 4678 config.go:281] Setting pods for source api I0125 05:12:00.947472 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:00.947773 4678 status_manager.go:425] Status for pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Pending Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935920 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935920 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql-slave]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935920 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc425ae2000 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-slave State:{Waiting:0xc42dc53fe0 Running: Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID: ContainerID:}]} version:1 podName:postgresql-slave-1-qt1rc podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:00.947854 4678 replication_controller.go:378] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11002 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11005 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave] 
Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:00.947996 4678 audit.go:125] 2017-01-25T05:12:00.947966426-05:00 AUDIT: id="4ef93ce3-e262-4099-82b8-a16d6f34aa9e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:12:00.947992 4678 replica_set.go:320] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11002 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11005 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
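The fan-out that follows — the ReplicaSet, Job, DaemonSet, PodDisruptionBudget and StatefulSet controllers each inspecting the same postgresql-slave-1-qt1rc update and reporting that nothing of theirs matches — boils down to equality-based label-selector matching against the pod's labels (app, deployment, deploymentconfig, name). A minimal Go sketch of that matching, using the selector visible later in the deployer's labelSelector query; an illustration only, not the vendored Kubernetes code:

package main

import "fmt"

// matches reports whether every key/value pair the selector requires is
// present, unchanged, in the pod's labels (equality-based selection).
func matches(selector, labels map[string]string) bool {
	for k, v := range selector {
		if labels[k] != v {
			return false
		}
	}
	return true
}

func main() {
	podLabels := map[string]string{
		"app":              "pg-replica-example",
		"deployment":       "postgresql-slave-1",
		"deploymentconfig": "postgresql-slave",
		"name":             "postgresql-slave",
	}
	// Selector taken from the labelSelector in the deployer's pod list query further down.
	rcSelector := map[string]string{
		"deployment":       "postgresql-slave-1",
		"deploymentconfig": "postgresql-slave",
		"name":             "postgresql-slave",
	}
	fmt.Println("rc selector matches pod:", matches(rcSelector, podLabels)) // true
	// A selector that does not overlap (e.g. some hypothetical ReplicaSet's) never matches.
	fmt.Println("unrelated selector matches pod:", matches(map[string]string{"name": "other"}, podLabels)) // false
}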
I0125 05:12:00.948091 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-qt1rc, ReplicaSet controller will avoid syncing I0125 05:12:00.948119 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-qt1rc, job controller will avoid syncing I0125 05:12:00.948140 4678 daemoncontroller.go:332] Pod postgresql-slave-1-qt1rc updated. I0125 05:12:00.948166 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-qt1rc, daemon set controller will avoid syncing I0125 05:12:00.948188 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-qt1rc" I0125 05:12:00.948220 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-qt1rc, PodDisruptionBudget controller will avoid syncing. I0125 05:12:00.948226 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-qt1rc" I0125 05:12:00.948314 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-qt1rc, StatefulSet controller will avoid syncing I0125 05:12:00.948385 4678 audit.go:125] 2017-01-25T05:12:00.948352988-05:00 AUDIT: id="83be242b-56ca-453c-8ba7-df003c2354b6" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:00.949065 4678 audit.go:45] 2017-01-25T05:12:00.949051313-05:00 AUDIT: id="4ef93ce3-e262-4099-82b8-a16d6f34aa9e" response="200" I0125 05:12:00.949280 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (6.793545ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:00.949532 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1, replicas 0->0 (need 0), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 1->2 I0125 05:12:00.950332 4678 audit.go:45] 2017-01-25T05:12:00.950318991-05:00 AUDIT: id="83be242b-56ca-453c-8ba7-df003c2354b6" response="200" I0125 05:12:00.950384 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (5.770096ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:00.950886 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:12:00.951037 4678 proxier.go:804] Syncing iptables rules I0125 05:12:00.951050 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:00.963056 4678 audit.go:125] 2017-01-25T05:12:00.962980684-05:00 AUDIT: id="2ee7132c-7843-4f14-a27e-ad831847083a" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status" I0125 05:12:00.969232 4678 audit.go:45] 2017-01-25T05:12:00.969209232-05:00 AUDIT: id="2ee7132c-7843-4f14-a27e-ad831847083a" response="200" I0125 05:12:00.970549 4678 panics.go:76] PUT 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status: (20.579378ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:00.972113 4678 factory.go:154] Replication controller "postgresql-slave-1" updated. I0125 05:12:00.972571 4678 audit.go:125] 2017-01-25T05:12:00.972531439-05:00 AUDIT: id="2b3f2167-4768-4070-9883-9e3800fca465" ip="172.17.0.2" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:12:00.973708 4678 audit.go:45] 2017-01-25T05:12:00.973693629-05:00 AUDIT: id="2b3f2167-4768-4070-9883-9e3800fca465" response="200" I0125 05:12:00.974128 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (9.061725ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.2:37586] I0125 05:12:00.974660 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. (49.942269ms) I0125 05:12:00.975007 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (63.665142ms) I0125 05:12:00.975078 4678 replication_controller.go:585] Too many "extended-test-postgresql-replication-1-34bbd-xd4g8"/"postgresql-slave-1" replicas, need 0, deleting 1 I0125 05:12:00.975094 4678 controller_utils.go:306] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 waiting on deletions for: [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc] I0125 05:12:00.975106 4678 controller_utils.go:175] Setting expectations &controller.ControlleeExpectations{add:0, del:1, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1", timestamp:time.Time{sec:63620935920, nsec:975103834, loc:(*time.Location)(0xa2479e0)}} I0125 05:12:00.975145 4678 controller_utils.go:523] Controller postgresql-slave-1 deleting pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc I0125 05:12:00.976314 4678 replication_controller.go:322] Observed updated replication controller postgresql-slave-1. 
Desired pod count change: 0->0 I0125 05:12:00.978308 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:01.001639 4678 audit.go:125] 2017-01-25T05:12:01.001583013-05:00 AUDIT: id="81ad59c1-9bcc-4f4d-abcf-0849d340d792" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:01.002239 4678 audit.go:125] 2017-01-25T05:12:01.00219365-05:00 AUDIT: id="92aaf874-4bc0-47ee-89a1-67047588ca75" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:01.004338 4678 audit.go:125] 2017-01-25T05:12:01.0042851-05:00 AUDIT: id="30a9a03b-e717-4d92-b9a8-9dc810369526" ip="172.17.0.2" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1%2Cdeploymentconfig%3Dpostgresql-slave%2Cname%3Dpostgresql-slave&resourceVersion=0" I0125 05:12:01.004753 4678 audit.go:45] 2017-01-25T05:12:01.004740798-05:00 AUDIT: id="30a9a03b-e717-4d92-b9a8-9dc810369526" response="200" I0125 05:12:01.005043 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1%2Cdeploymentconfig%3Dpostgresql-slave%2Cname%3Dpostgresql-slave&resourceVersion=0: (7.643929ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.2:37586] I0125 05:12:01.005425 4678 audit.go:45] 2017-01-25T05:12:01.005411874-05:00 AUDIT: id="81ad59c1-9bcc-4f4d-abcf-0849d340d792" response="200" I0125 05:12:01.005504 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (28.173522ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:01.006339 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:01.021768 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:01.021864 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:01.021895 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:01.021906 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:01.021916 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:01.021924 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:01.021933 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:12:01.022774 
4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:12:01.028234 4678 audit.go:45] 2017-01-25T05:12:01.028211725-05:00 AUDIT: id="92aaf874-4bc0-47ee-89a1-67047588ca75" response="200" I0125 05:12:01.028388 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (50.465569ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:01.030446 4678 replication_controller.go:378] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11005 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11008 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc42d07d6d8 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:01.030548 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:12:31.005970671 -0500 EST, labels map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example]. 
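Once the DELETE above returns 200, the pod comes back through the watch with DeletionTimestamp set about thirty seconds out (05:12:31 for a request accepted at 05:12:01) and a non-nil DeletionGracePeriodSeconds; further down the controller logs "Ignoring inactive pod ... in state Pending, deletion time 2017-01-25 05:12:31". A hedged sketch of that filtering step, with deliberately simplified stand-in types rather than the real api.Pod:

package main

import (
	"fmt"
	"time"
)

// pod is a minimal stand-in for api.Pod: just enough to decide "active or not".
type pod struct {
	Name              string
	Phase             string     // "Pending", "Running", "Succeeded", "Failed"
	DeletionTimestamp *time.Time // non-nil once a graceful delete has been accepted
}

// isActive captures the idea behind "Ignoring inactive pod": terminal pods and
// pods already marked for deletion no longer count toward the replica total.
func isActive(p pod) bool {
	return p.Phase != "Succeeded" && p.Phase != "Failed" && p.DeletionTimestamp == nil
}

func main() {
	del := time.Date(2017, 1, 25, 5, 12, 31, 0, time.FixedZone("EST", -5*3600))
	pods := []pod{
		{Name: "postgresql-slave-1-qt1rc", Phase: "Pending", DeletionTimestamp: &del},
		{Name: "postgresql-slave-1-fresh", Phase: "Running"}, // hypothetical pod for contrast
	}
	active := 0
	for _, p := range pods {
		if !isActive(p) {
			fmt.Printf("ignoring inactive pod %s, deletion time %v\n", p.Name, p.DeletionTimestamp)
			continue
		}
		active++
	}
	fmt.Println("active replicas:", active) // 1
}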
I0125 05:12:01.030627 4678 controller_utils.go:320] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 received delete for pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc I0125 05:12:01.030644 4678 controller_utils.go:192] Lowered expectations &controller.ControlleeExpectations{add:0, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1", timestamp:time.Time{sec:63620935920, nsec:975103834, loc:(*time.Location)(0xa2479e0)}} I0125 05:12:01.030695 4678 replica_set.go:320] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11005 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11008 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc42d07d6d8 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave] OwnerReferences:[] Finalizers:[] ClusterName:}. 
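The Setting/Lowered expectations pair above (del goes 1 -> 0 as soon as the delete event is observed) is how the replication manager avoids re-syncing a key until the side effects it just requested have arrived through the watch. A simplified sketch of that counter; the field names echo the log, but this is not the real controller.ControlleeExpectations type:

package main

import (
	"fmt"
	"sync/atomic"
)

// expectations counts how many pod creations and deletions the controller is
// still waiting to observe for one RC key before it will sync that key again.
type expectations struct {
	key      string
	add, del int64
}

func (e *expectations) expectDeletions(n int64) { atomic.AddInt64(&e.del, n) }
func (e *expectations) deletionObserved()       { atomic.AddInt64(&e.del, -1) }

// fulfilled reports whether every expected create and delete has been seen.
func (e *expectations) fulfilled() bool {
	return atomic.LoadInt64(&e.add) <= 0 && atomic.LoadInt64(&e.del) <= 0
}

func main() {
	e := &expectations{key: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1"}
	e.expectDeletions(1)       // "Setting expectations ... del:1"
	fmt.Println(e.fulfilled()) // false: the sync loop waits
	e.deletionObserved()       // "Lowered expectations ... del:0"
	fmt.Println(e.fulfilled()) // true: the next sync may proceed
}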
I0125 05:12:01.030778 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:12:31.005970671 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-slave-1-qt1rc", GenerateName:"postgresql-slave-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11008", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935920, nsec:921802332, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42c1a6780), DeletionGracePeriodSeconds:(*int64)(0xc42d07d6d8), Labels:map[string]string{"app":"pg-replica-example", "deployment":"postgresql-slave-1", "deploymentconfig":"postgresql-slave", "name":"postgresql-slave"}, Annotations:map[string]string{"kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-slave-1\",\"uid\":\"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"10998\"}}\n", "openshift.io/deployment.name":"postgresql-slave-1", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1", "openshift.io/deployment-config.name":"postgresql-slave"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc42d07d790), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc429833830), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), 
RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-slave", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-slave"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_SERVICE_NAME", Value:"postgresql-master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc4298339b0), ReadinessProbe:(*api.Probe)(0xc429833da0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc429833dd0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc42d07da70), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc433f7b740), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Pending", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql-slave]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, 
LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc42c1a6ae0), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-slave", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(0xc42c1a6b00), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"", ContainerID:""}}}}. I0125 05:12:01.031118 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-qt1rc, ReplicaSet controller will avoid syncing I0125 05:12:01.031163 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-qt1rc, job controller will avoid syncing I0125 05:12:01.031188 4678 daemoncontroller.go:332] Pod postgresql-slave-1-qt1rc updated. I0125 05:12:01.031239 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-qt1rc, daemon set controller will avoid syncing I0125 05:12:01.031271 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-qt1rc" I0125 05:12:01.031287 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-qt1rc, PodDisruptionBudget controller will avoid syncing. I0125 05:12:01.031292 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-qt1rc" I0125 05:12:01.031431 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-qt1rc, StatefulSet controller will avoid syncing I0125 05:12:01.031899 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1, replicas 0->1 (need 0), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 1->3 I0125 05:12:01.032861 4678 event.go:217] Event(api.ObjectReference{Kind:"ReplicationController", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1", UID:"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10999", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: postgresql-slave-1-qt1rc I0125 05:12:01.034035 4678 config.go:281] Setting pods for source api I0125 05:12:01.035277 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:01.039383 4678 audit.go:125] 2017-01-25T05:12:01.039310554-05:00 AUDIT: id="5fe24590-df71-4090-b86e-a1ea32135404" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:01.040317 4678 audit.go:45] 2017-01-25T05:12:01.040303177-05:00 AUDIT: id="5fe24590-df71-4090-b86e-a1ea32135404" response="200" I0125 05:12:01.040389 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (13.060877ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) 
kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:01.040520 4678 audit.go:125] 2017-01-25T05:12:01.040482129-05:00 AUDIT: id="f69ba66f-bcaf-46aa-b9dc-7a0df642ea3c" ip="172.17.0.2" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1%2Cdeploymentconfig%3Dpostgresql-slave%2Cname%3Dpostgresql-slave&resourceVersion=11005&timeoutSeconds=537" I0125 05:12:01.041131 4678 audit.go:45] 2017-01-25T05:12:01.041118071-05:00 AUDIT: id="f69ba66f-bcaf-46aa-b9dc-7a0df642ea3c" response="200" I0125 05:12:01.041459 4678 audit.go:125] 2017-01-25T05:12:01.041417138-05:00 AUDIT: id="7fb4f161-0225-46dd-9dae-ef2eab903d24" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status" I0125 05:12:01.042113 4678 audit.go:125] 2017-01-25T05:12:01.042081511-05:00 AUDIT: id="2d671017-2323-4fba-b3c5-4aaa38f571e3" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:01.042167 4678 audit.go:45] 2017-01-25T05:12:01.042153959-05:00 AUDIT: id="7fb4f161-0225-46dd-9dae-ef2eab903d24" response="409" I0125 05:12:01.042233 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status: (5.976675ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:01.042506 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(67.804163ms) I0125 05:12:01.046366 4678 audit.go:125] 2017-01-25T05:12:01.046331873-05:00 AUDIT: id="b68b0235-87a4-4b97-9ed0-49c8713b2ff9" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:01.046720 4678 audit.go:125] 2017-01-25T05:12:01.046688533-05:00 AUDIT: id="33b49f3a-0e75-4533-920e-891251cedc3c" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:12:01.047113 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/empty-dir/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094-postgresql-data" (spec.Name: "postgresql-data") pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:12:01.047137 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/secret/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:12:01.047502 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:01.069054 4678 audit.go:45] 2017-01-25T05:12:01.06902943-05:00 AUDIT: id="b68b0235-87a4-4b97-9ed0-49c8713b2ff9" response="200" I0125 05:12:01.069181 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (26.080477ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:01.069829 4678 audit.go:45] 2017-01-25T05:12:01.069815354-05:00 AUDIT: id="33b49f3a-0e75-4533-920e-891251cedc3c" response="200" I0125 05:12:01.070297 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (26.691333ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:01.071536 4678 audit.go:45] 2017-01-25T05:12:01.071522204-05:00 AUDIT: id="2d671017-2323-4fba-b3c5-4aaa38f571e3" response="201" I0125 05:12:01.071633 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (34.922401ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:01.072560 4678 iptables.go:362] running iptables -N [KUBE-MARK-DROP -t nat] I0125 05:12:01.086064 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:12:01.086993 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1, replicas 0->1 (need 0), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 2->3 I0125 05:12:01.094386 4678 audit.go:125] 2017-01-25T05:12:01.094325434-05:00 AUDIT: 
id="fc8baa11-47e7-4ccb-adae-9b51f332faf6" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:01.095147 4678 audit.go:45] 2017-01-25T05:12:01.095134926-05:00 AUDIT: id="fc8baa11-47e7-4ccb-adae-9b51f332faf6" response="200" I0125 05:12:01.095225 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (5.645256ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:01.095364 4678 audit.go:125] 2017-01-25T05:12:01.095328599-05:00 AUDIT: id="e126e958-670a-4ddd-ab80-0f0f75a5aa30" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status" I0125 05:12:01.096577 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. (54.026735ms) I0125 05:12:01.100791 4678 replication_controller.go:322] Observed updated replication controller postgresql-slave-1. Desired pod count change: 0->0 I0125 05:12:01.100804 4678 replication_controller.go:338] Observed updated replica count for rc: postgresql-slave-1, 0->1 I0125 05:12:01.101122 4678 factory.go:154] Replication controller "postgresql-slave-1" updated. I0125 05:12:01.102602 4678 audit.go:125] 2017-01-25T05:12:01.102560633-05:00 AUDIT: id="5b7daa65-8d41-48b0-b1f6-2cc78b97c8d2" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status" I0125 05:12:01.105278 4678 audit.go:45] 2017-01-25T05:12:01.105263179-05:00 AUDIT: id="5b7daa65-8d41-48b0-b1f6-2cc78b97c8d2" response="200" I0125 05:12:01.105404 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status: (3.103294ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:01.105636 4678 iptables.go:362] running iptables -C [KUBE-MARK-DROP -t nat -j MARK --set-xmark 0x00008000/0x00008000] I0125 05:12:01.116131 4678 audit.go:45] 2017-01-25T05:12:01.116087585-05:00 AUDIT: id="e126e958-670a-4ddd-ab80-0f0f75a5aa30" response="200" I0125 05:12:01.117174 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc434887b80 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/2f5e0764 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42f1527e0 NetworkSettings:0xc4242fe600} I0125 
05:12:01.118014 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" (observed generation: 2) I0125 05:12:01.119044 4678 factory.go:122] Updating deployment config "postgresql-slave" I0125 05:12:01.119769 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status: (29.416162ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:01.120388 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (145.342646ms) I0125 05:12:01.120487 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc in state Pending, deletion time 2017-01-25 05:12:31.005970671 -0500 EST I0125 05:12:01.120518 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1, replicas 1->0 (need 0), fullyLabeledReplicas 1->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 3->3 I0125 05:12:01.123778 4678 audit.go:125] 2017-01-25T05:12:01.123730546-05:00 AUDIT: id="29b6d83a-d3f0-423d-a1b5-9a753bb63c06" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status" I0125 05:12:01.127180 4678 audit.go:45] 2017-01-25T05:12:01.127163508-05:00 AUDIT: id="29b6d83a-d3f0-423d-a1b5-9a753bb63c06" response="200" I0125 05:12:01.127327 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:01.144996 4678 factory.go:154] Replication controller "postgresql-slave-1" updated. I0125 05:12:01.147517 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-helper-1-deploy", UID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10986", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Created' Created container with docker id 57a48f33a7e9; Security:[seccomp=unconfined] I0125 05:12:01.148917 4678 audit.go:125] 2017-01-25T05:12:01.148874973-05:00 AUDIT: id="2f0f7d2d-dcec-494b-b491-abb4a5306c85" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:01.150673 4678 audit.go:125] 2017-01-25T05:12:01.150626506-05:00 AUDIT: id="25e48d27-d2bc-4d10-949a-25a7c241aa4f" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status" I0125 05:12:01.152097 4678 replication_controller.go:322] Observed updated replication controller postgresql-slave-1. 
Desired pod count change: 0->0 I0125 05:12:01.152111 4678 replication_controller.go:338] Observed updated replica count for rc: postgresql-slave-1, 1->0 I0125 05:12:01.153597 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/secret/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") to pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:01.153673 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/empty-dir/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094-postgresql-data" (spec.Name: "postgresql-data") to pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:01.171795 4678 audit.go:45] 2017-01-25T05:12:01.171755427-05:00 AUDIT: id="25e48d27-d2bc-4d10-949a-25a7c241aa4f" response="200" I0125 05:12:01.172035 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status: (21.622347ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:01.175094 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" (observed generation: 2) I0125 05:12:01.176728 4678 factory.go:122] Updating deployment config "postgresql-slave" I0125 05:12:01.178729 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc4350131e0 Mounts:[] Config:0xc430508c60 NetworkSettings:0xc422894900} I0125 05:12:01.178972 4678 iptables.go:362] running iptables -N [KUBE-FIREWALL -t filter] I0125 05:12:01.202315 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1/status: (81.09025ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:01.202717 4678 secret.go:179] Setting up volume default-token-0g2nw for pod b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:01.203340 4678 empty_dir.go:248] pod b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094: mounting tmpfs for volume wrapped_default-token-0g2nw I0125 05:12:01.203358 4678 mount_linux.go:112] Mounting tmpfs /mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw tmpfs [] with command: "mount" I0125 05:12:01.203368 4678 mount_linux.go:115] Mounting cmd (mount) with arguments ([-t tmpfs tmpfs /mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw]) I0125 05:12:01.234071 4678 audit.go:45] 2017-01-25T05:12:01.234037593-05:00 AUDIT: id="2f0f7d2d-dcec-494b-b491-abb4a5306c85" response="201" I0125 05:12:01.234212 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (85.647076ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:01.234937 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (114.486648ms) I0125 05:12:01.235076 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc in state Pending, deletion time 2017-01-25 05:12:31.005970671 -0500 EST I0125 
05:12:01.235109 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (109.275µs) I0125 05:12:01.247855 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:01.284764 4678 iptables.go:362] running iptables -C [KUBE-FIREWALL -t filter -m comment --comment kubernetes firewall for dropping marked packets -m mark --mark 0x00008000/0x00008000 -j DROP] I0125 05:12:01.301040 4678 audit.go:125] 2017-01-25T05:12:01.300968759-05:00 AUDIT: id="05f12214-a89b-44f8-a733-636686c08c2b" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:12:01.302968 4678 audit.go:45] 2017-01-25T05:12:01.302939659-05:00 AUDIT: id="05f12214-a89b-44f8-a733-636686c08c2b" response="200" I0125 05:12:01.303261 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (2.600433ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:01.303582 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:12:01.303669 4678 atomic_writer.go:145] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc volume default-token-0g2nw: write required for target directory /mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:01.304796 4678 atomic_writer.go:160] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc volume default-token-0g2nw: performed write of new data to ts data directory: /mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw/..1981_25_01_05_12_01.175941239 I0125 05:12:01.305453 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094"). 
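The atomic_writer lines above record the secret payload going into a fresh timestamped data directory inside the volume before anything points at it. A rough standard-library sketch of that write-then-swap pattern; the "..data" symlink name and directory layout here are assumptions for illustration, not something this log confirms:

package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
	"time"
)

// writePayloadAtomically writes every file into a brand-new timestamped
// directory, then atomically repoints a "..data" symlink at it, so a reader
// that follows the symlink never observes a half-written update.
func writePayloadAtomically(volumeDir string, payload map[string][]byte) error {
	tsDir := filepath.Join(volumeDir, ".."+time.Now().Format("2006_01_02_15_04_05.000000000"))
	if err := os.MkdirAll(tsDir, 0o700); err != nil {
		return err
	}
	for name, data := range payload {
		if err := os.WriteFile(filepath.Join(tsDir, name), data, 0o600); err != nil {
			return err
		}
	}
	tmpLink := filepath.Join(volumeDir, "..data_tmp")
	if err := os.Symlink(filepath.Base(tsDir), tmpLink); err != nil {
		return err
	}
	// rename(2) over the previous symlink is the atomic step.
	return os.Rename(tmpLink, filepath.Join(volumeDir, "..data"))
}

func main() {
	dir, err := os.MkdirTemp("", "default-token-demo")
	if err != nil {
		log.Fatal(err)
	}
	defer os.RemoveAll(dir)
	payload := map[string][]byte{"token": []byte("demo"), "ca.crt": []byte("demo")}
	if err := writePayloadAtomically(dir, payload); err != nil {
		log.Fatal(err)
	}
	target, _ := os.Readlink(filepath.Join(dir, "..data"))
	fmt.Println("..data ->", target)
}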
I0125 05:12:01.321851 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:01.352639 4678 generic.go:342] PLEG: Write status for postgresql-slave-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-slave-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.2", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42a8f1ce0), (*container.ContainerStatus)(0xc42a1290a0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:01.353478 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d"} I0125 05:12:01.361570 4678 manager.go:898] Added container: "/system.slice/docker-a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f.scope" (aliases: [k8s_deployment.7770d39a_postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094_04ad5900 a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f], namespace: "docker") I0125 05:12:01.361803 4678 handler.go:325] Added event &{/system.slice/docker-a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f.scope 2017-01-25 05:12:00.33687085 -0500 EST containerCreation {}} I0125 05:12:01.361869 4678 container.go:407] Start housekeeping for container "/system.slice/docker-a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f.scope" I0125 05:12:01.378508 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:01.394251 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -j KUBE-FIREWALL] I0125 05:12:01.453503 4678 quota.go:156] XFS quota applied: device=/dev/mapper/docker--vg-openshift--xfs--vol--dir, quota=4697620480, fsGroup=1000640000 I0125 05:12:01.453553 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/empty-dir/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094-postgresql-data" (spec.Name: "postgresql-data") pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094"). 
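The iptables.go entries in this stretch follow one pattern: create a chain with -N (tolerating "already exists"), probe a rule with -C, and append it only when the probe fails, after which the full ruleset below is pushed in a single transaction with iptables-restore --noflush --counters. A hedged stand-alone sketch of the ensure step (it needs iptables and root, so treat it as illustrative rather than as the kube-proxy implementation):

package main

import (
	"fmt"
	"os/exec"
)

// ensureChain creates a chain if it is missing; iptables -N on an existing
// chain exits non-zero, which is deliberately ignored, mirroring the
// "running iptables -N [KUBE-SERVICES -t filter]" lines in the log.
func ensureChain(table, chain string) {
	_ = exec.Command("iptables", "-t", table, "-N", chain).Run()
}

// ensureRule probes with -C and appends with -A only when the probe fails,
// which keeps repeated proxier syncs idempotent.
func ensureRule(table, chain string, ruleSpec ...string) error {
	check := append([]string{"-t", table, "-C", chain}, ruleSpec...)
	if exec.Command("iptables", check...).Run() == nil {
		return nil // rule already present
	}
	add := append([]string{"-t", table, "-A", chain}, ruleSpec...)
	return exec.Command("iptables", add...).Run()
}

func main() {
	ensureChain("filter", "KUBE-SERVICES")
	err := ensureRule("filter", "OUTPUT",
		"-m", "comment", "--comment", "kubernetes service portals", "-j", "KUBE-SERVICES")
	fmt.Println("ensure OUTPUT -> KUBE-SERVICES:", err)
}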
I0125 05:12:01.461499 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:01.493664 4678 iptables.go:362] running iptables -C [INPUT -t filter -j KUBE-FIREWALL] I0125 05:12:01.516446 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment 
"default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:01.516535 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:01.566300 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:01.568117 4678 audit.go:125] 2017-01-25T05:12:01.568067309-05:00 AUDIT: id="7f2694e8-6a29-41ca-a2fb-63d87db887f9" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:12:01.570127 4678 audit.go:45] 2017-01-25T05:12:01.570107667-05:00 AUDIT: id="7f2694e8-6a29-41ca-a2fb-63d87db887f9" response="200" I0125 05:12:01.570417 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (2.667906ms) 200 [[openshift/v1.5.2+43a9be4 
(linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:01.570761 4678 docker_manager.go:1947] Need to restart pod infra container for "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" because it is not found I0125 05:12:01.570832 4678 docker_manager.go:1992] Container {Name:postgresql-slave Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 Command:[] Args:[run-postgresql-slave] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_MASTER_SERVICE_NAME Value:postgresql-master ValueFrom:} {Name:POSTGRESQL_MASTER_USER Value:master ValueFrom:} {Name:POSTGRESQL_MASTER_PASSWORD Value:qcoktIqkwDX8 ValueFrom:} {Name:POSTGRESQL_USER Value:user ValueFrom:} {Name:POSTGRESQL_PASSWORD Value:IbyV1wgYrrMd ValueFrom:} {Name:POSTGRESQL_DATABASE Value:userdb ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:postgresql-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc42cb02330 ReadinessProbe:0xc42cb02360 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc42cb02390 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it. I0125 05:12:01.570862 4678 docker_manager.go:2086] Got container changes for pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:true InfraChanged:false InfraContainerId: InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[0:Container {Name:postgresql-slave Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 Command:[] Args:[run-postgresql-slave] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_MASTER_SERVICE_NAME Value:postgresql-master ValueFrom:} {Name:POSTGRESQL_MASTER_USER Value:master ValueFrom:} {Name:POSTGRESQL_MASTER_PASSWORD Value:qcoktIqkwDX8 ValueFrom:} {Name:POSTGRESQL_USER Value:user ValueFrom:} {Name:POSTGRESQL_PASSWORD Value:IbyV1wgYrrMd ValueFrom:} {Name:POSTGRESQL_DATABASE Value:userdb ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:postgresql-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc42cb02330 ReadinessProbe:0xc42cb02360 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc42cb02390 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it.] 
ContainersToKeep:map[]} I0125 05:12:01.570900 4678 docker_manager.go:2095] Killing Infra Container for "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)", will start new one I0125 05:12:01.570927 4678 docker_manager.go:2153] Creating pod infra container for "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:01.579643 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/POD podIP: "" creating hosts mount: false I0125 05:12:01.585167 4678 iptables.go:362] running iptables -N [KUBE-MARK-MASQ -t nat] I0125 05:12:01.608435 4678 proxier.go:797] syncProxyRules took 657.392305ms I0125 05:12:01.608521 4678 proxier.go:566] OnEndpointsUpdate took 657.543941ms for 6 endpoints I0125 05:12:01.608698 4678 proxier.go:381] Received update notice: [] I0125 05:12:01.608764 4678 proxier.go:804] Syncing iptables rules I0125 05:12:01.608801 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:01.665412 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:01.693070 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:01.695655 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/POD: setting entrypoint "[]" and command "[]" I0125 05:12:01.697810 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:01.734623 4678 iptables.go:362] running iptables -C [KUBE-MARK-MASQ -t nat -j MARK --set-xmark 0x00004000/0x00004000] I0125 05:12:01.767152 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:01.802579 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-helper-1-deploy", UID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10986", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Started' Started container with docker id 57a48f33a7e9 I0125 05:12:01.802715 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:01.822180 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:01.837293 4678 audit.go:125] 2017-01-25T05:12:01.83719109-05:00 AUDIT: id="7e366aa0-7aeb-4273-98b4-2254b001169a" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" E0125 05:12:01.845119 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment": symlink /var/log/containers/postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_deployment-57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d.log: no such file or directory I0125 05:12:01.860975 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc429162f20 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 
Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/ce4eccb1 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc431428d80 NetworkSettings:0xc422298000} I0125 05:12:01.861766 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d.scope" I0125 05:12:01.862296 4678 audit.go:45] 2017-01-25T05:12:01.862275671-05:00 AUDIT: id="7e366aa0-7aeb-4273-98b4-2254b001169a" response="201" I0125 05:12:01.862483 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (25.656034ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:01.886831 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc4291638c0 Mounts:[] Config:0xc431429680 NetworkSettings:0xc422298700} I0125 05:12:01.888631 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:01.914342 4678 audit.go:125] 2017-01-25T05:12:01.91428778-05:00 AUDIT: id="fce68564-990d-445e-9e9c-7e0533fd70a0" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:12:01.916325 4678 audit.go:45] 2017-01-25T05:12:01.916301099-05:00 AUDIT: id="fce68564-990d-445e-9e9c-7e0533fd70a0" response="200" I0125 05:12:01.916442 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (2.416455ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:01.917399 4678 audit.go:125] 2017-01-25T05:12:01.917358839-05:00 AUDIT: id="76b82572-31b4-4bb6-bcfa-f59b811be618" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:12:01.918783 4678 audit.go:45] 2017-01-25T05:12:01.918769184-05:00 AUDIT: id="76b82572-31b4-4bb6-bcfa-f59b811be618" response="200" I0125 05:12:01.919278 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (7.113162ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50088] I0125 05:12:01.923963 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:01.939549 4678 iptables.go:362] running iptables -C [KUBE-POSTROUTING -t nat -m comment --comment kubernetes service traffic requiring SNAT -m mark --mark 0x00004000/0x00004000 -j MASQUERADE] I0125 05:12:01.977004 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:02.002499 4678 audit.go:125] 2017-01-25T05:12:02.002413203-05:00 AUDIT: id="81e6b2a6-5593-47be-a867-3cd3efb77e3d" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" 
namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-master" I0125 05:12:02.005184 4678 audit.go:45] 2017-01-25T05:12:02.005164902-05:00 AUDIT: id="81e6b2a6-5593-47be-a867-3cd3efb77e3d" response="200" I0125 05:12:02.005836 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-master: (7.825303ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50088] I0125 05:12:02.040647 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:02.063386 4678 audit.go:125] 2017-01-25T05:12:02.063314669-05:00 AUDIT: id="e3447fa4-2ff7-47ca-8ea1-a816bfd49f70" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:12:02.065231 4678 audit.go:45] 2017-01-25T05:12:02.065194659-05:00 AUDIT: id="e3447fa4-2ff7-47ca-8ea1-a816bfd49f70" response="200" I0125 05:12:02.065764 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (7.30533ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50088] I0125 05:12:02.076809 4678 audit.go:125] 2017-01-25T05:12:02.076758245-05:00 AUDIT: id="06c1ae2d-daa9-441c-9876-e994a1f56af2" ip="172.17.0.4" method="PUT" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:12:02.080932 4678 audit.go:45] 2017-01-25T05:12:02.080912336-05:00 AUDIT: id="06c1ae2d-daa9-441c-9876-e994a1f56af2" response="200" I0125 05:12:02.081912 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (7.757393ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50088] I0125 05:12:02.082376 4678 factory.go:154] Replication controller "postgresql-master-1" updated. I0125 05:12:02.082421 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. Desired pod count change: 0->1 I0125 05:12:02.082471 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 either never recorded expectations, or the ttl expired. 
I0125 05:12:02.082503 4678 controller_utils.go:175] Setting expectations &controller.ControlleeExpectations{add:1, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1", timestamp:time.Time{sec:63620935922, nsec:82500339, loc:(*time.Location)(0xa2479e0)}} I0125 05:12:02.082528 4678 replication_controller.go:541] Too few "extended-test-postgresql-replication-1-34bbd-xd4g8"/"postgresql-master-1" replicas, need 1, creating 1 I0125 05:12:02.084143 4678 audit.go:125] 2017-01-25T05:12:02.084089847-05:00 AUDIT: id="ae736b65-d930-4221-b6ad-f4060683ae1c" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:12:02.086402 4678 audit.go:125] 2017-01-25T05:12:02.086364037-05:00 AUDIT: id="b7bf4827-5ee4-4737-a888-53e26cd026ff" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:12:02.086969 4678 admission.go:77] getting security context constraints for pod (generate: postgresql-master-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{system:serviceaccount:openshift-infra:replication-controller cab49cde-e2d9-11e6-a4b0-0e6a5cbf0094 [system:serviceaccounts system:serviceaccounts:openshift-infra system:authenticated] map[]} I0125 05:12:02.087005 4678 admission.go:88] getting security context constraints for pod (generate: postgresql-master-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:02.087735 4678 audit.go:125] 2017-01-25T05:12:02.087701747-05:00 AUDIT: id="acf33a5b-6930-4a55-bfdb-988225c32c32" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:02.088870 4678 audit.go:45] 2017-01-25T05:12:02.088855841-05:00 AUDIT: id="acf33a5b-6930-4a55-bfdb-988225c32c32" response="200" I0125 05:12:02.088950 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.511692ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:02.089282 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:02.089295 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:02.089309 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:02.089331 4678 admission.go:149] validating pod (generate: postgresql-master-1-) against providers restricted I0125 05:12:02.089390 4678 admission.go:116] pod (generate: postgresql-master-1-) validated against provider restricted I0125 05:12:02.091571 4678 audit.go:45] 2017-01-25T05:12:02.091556402-05:00 AUDIT: id="b7bf4827-5ee4-4737-a888-53e26cd026ff" 
response="201" I0125 05:12:02.091661 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (7.841751ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.092240 4678 controller_utils.go:512] Controller postgresql-master-1 created pod postgresql-master-1-6jfgj I0125 05:12:02.092304 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 0->0 (need 1), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 1->2 I0125 05:12:02.092620 4678 event.go:217] Event(api.ObjectReference{Kind:"ReplicationController", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1", UID:"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11016", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: postgresql-master-1-6jfgj I0125 05:12:02.093191 4678 factory.go:488] About to try and schedule pod postgresql-master-1-6jfgj I0125 05:12:02.093222 4678 scheduler.go:93] Attempting to schedule pod: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj I0125 05:12:02.093440 4678 controller_utils.go:192] Lowered expectations &controller.ControlleeExpectations{add:0, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1", timestamp:time.Time{sec:63620935922, nsec:82500339, loc:(*time.Location)(0xa2479e0)}} I0125 05:12:02.093480 4678 replica_set.go:288] Pod postgresql-master-1-6jfgj created: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-1-6jfgj", GenerateName:"postgresql-master-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11017", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:89557223, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"app":"pg-replica-example", "deployment":"postgresql-master-1", "deploymentconfig":"postgresql-master", "name":"postgresql-master"}, Annotations:map[string]string{"openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1", "openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-1", "openshift.io/generated-by":"OpenShiftNewApp", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-1\",\"uid\":\"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11016\"}}\n"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), 
Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc42c4e85e0), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc428cad2f0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"X5NgRSrwacHP", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc428cad380), ReadinessProbe:(*api.Probe)(0xc428cad3b0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", 
SecurityContext:(*api.SecurityContext)(0xc428cad3e0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc42e9b6f70), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"", SecurityContext:(*api.PodSecurityContext)(0xc430e4ab80), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Pending", Conditions:[]api.PodCondition(nil), Message:"", Reason:"", HostIP:"", PodIP:"", StartTime:(*unversioned.Time)(nil), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus(nil)}}. I0125 05:12:02.093798 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-6jfgj, ReplicaSet controller will avoid syncing I0125 05:12:02.093834 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-6jfgj, job controller will avoid syncing I0125 05:12:02.093853 4678 daemoncontroller.go:309] Pod postgresql-master-1-6jfgj added. I0125 05:12:02.093887 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-6jfgj, daemon set controller will avoid syncing I0125 05:12:02.093904 4678 disruption.go:314] addPod called on pod "postgresql-master-1-6jfgj" I0125 05:12:02.093920 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-6jfgj, PodDisruptionBudget controller will avoid syncing. I0125 05:12:02.093926 4678 disruption.go:317] No matching pdb for pod "postgresql-master-1-6jfgj" I0125 05:12:02.094141 4678 pet_set.go:160] Pod postgresql-master-1-6jfgj created, labels: map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] I0125 05:12:02.094169 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-6jfgj, StatefulSet controller will avoid syncing I0125 05:12:02.094364 4678 selector_spreading.go:114] skipping pending-deleted pod: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc I0125 05:12:02.094421 4678 factory.go:648] Attempting to bind postgresql-master-1-6jfgj to 172.18.7.222 I0125 05:12:02.095245 4678 audit.go:125] 2017-01-25T05:12:02.095183454-05:00 AUDIT: id="8233e9b8-b18f-4668-9018-0629d8d1177b" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings" I0125 05:12:02.102533 4678 audit.go:45] 2017-01-25T05:12:02.102513319-05:00 AUDIT: id="8233e9b8-b18f-4668-9018-0629d8d1177b" response="201" I0125 05:12:02.102598 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings: (7.679272ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:02.105167 4678 audit.go:125] 2017-01-25T05:12:02.105126175-05:00 AUDIT: id="0dfea9c8-7815-450b-aa74-370763f2b726" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:02.105699 4678 audit.go:125] 2017-01-25T05:12:02.105663989-05:00 AUDIT: id="a91fc904-ef8f-4b19-87e4-01f572439f61" ip="172.18.7.222" method="PUT" 
user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:12:02.106805 4678 audit.go:125] 2017-01-25T05:12:02.106770562-05:00 AUDIT: id="9aa1d008-3636-4e1d-8c8e-b749565a9112" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:02.107613 4678 config.go:281] Setting pods for source api I0125 05:12:02.108504 4678 config.go:397] Receiving a new pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.108953 4678 kubelet.go:1781] SyncLoop (ADD, "api"): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.109358 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.110019 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.110621 4678 replication_controller.go:378] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11017 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11018 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/generated-by:OpenShiftNewApp 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:02.110771 4678 replica_set.go:320] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11017 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11018 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:02.110887 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-6jfgj, ReplicaSet controller will avoid syncing I0125 05:12:02.110920 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-6jfgj, job controller will avoid syncing I0125 05:12:02.110943 4678 daemoncontroller.go:332] Pod postgresql-master-1-6jfgj updated. 
I0125 05:12:02.110979 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-6jfgj, daemon set controller will avoid syncing I0125 05:12:02.111001 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-6jfgj" I0125 05:12:02.111018 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-6jfgj, PodDisruptionBudget controller will avoid syncing. I0125 05:12:02.111024 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-6jfgj" I0125 05:12:02.111107 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-6jfgj, StatefulSet controller will avoid syncing I0125 05:12:02.111870 4678 audit.go:125] 2017-01-25T05:12:02.111833744-05:00 AUDIT: id="558e3731-7e1f-496f-90a1-c0db0601695c" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:02.113478 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:02.134825 4678 audit.go:125] 2017-01-25T05:12:02.134763593-05:00 AUDIT: id="b80092cf-de8c-436b-8d57-1809b392e0df" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:02.138536 4678 audit.go:45] 2017-01-25T05:12:02.138515244-05:00 AUDIT: id="ae736b65-d930-4221-b6ad-f4060683ae1c" response="200" I0125 05:12:02.138708 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (54.877284ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:02.139936 4678 audit.go:45] 2017-01-25T05:12:02.139922003-05:00 AUDIT: id="a91fc904-ef8f-4b19-87e4-01f572439f61" response="200" I0125 05:12:02.141283 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (44.75434ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.141971 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (59.514831ms) I0125 05:12:02.142079 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 0->1 (need 1), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 1->2 I0125 05:12:02.142523 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. 
Desired pod count change: 1->1 I0125 05:12:02.143074 4678 audit.go:45] 2017-01-25T05:12:02.143060806-05:00 AUDIT: id="b80092cf-de8c-436b-8d57-1809b392e0df" response="200" I0125 05:12:02.143153 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (30.085719ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:02.144793 4678 audit.go:45] 2017-01-25T05:12:02.144779921-05:00 AUDIT: id="9aa1d008-3636-4e1d-8c8e-b749565a9112" response="201" I0125 05:12:02.144880 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (47.811032ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.145100 4678 audit.go:45] 2017-01-25T05:12:02.14508578-05:00 AUDIT: id="558e3731-7e1f-496f-90a1-c0db0601695c" response="201" I0125 05:12:02.145153 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (33.58301ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:02.148453 4678 audit.go:125] 2017-01-25T05:12:02.148405175-05:00 AUDIT: id="9bb3a1aa-7a2a-41a8-8625-243733b6f0d1" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/status" I0125 05:12:02.151098 4678 audit.go:125] 2017-01-25T05:12:02.151059894-05:00 AUDIT: id="79fde1b6-8945-496a-aa49-e731545232f9" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:12:02.151738 4678 audit.go:45] 2017-01-25T05:12:02.15172599-05:00 AUDIT: id="79fde1b6-8945-496a-aa49-e731545232f9" response="409" I0125 05:12:02.151790 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (5.455406ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.152530 4678 audit.go:45] 2017-01-25T05:12:02.152516497-05:00 AUDIT: id="9bb3a1aa-7a2a-41a8-8625-243733b6f0d1" response="200" I0125 05:12:02.152592 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 2) I0125 05:12:02.152606 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/status: (4.464816ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:02.153491 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:12:02.153456 4678 replication_controller.go:378] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11018 Generation:0 
CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11023 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:02.153636 4678 factory.go:154] Replication controller "postgresql-master-1" updated. 
I0125 05:12:02.153592 4678 replica_set.go:320] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11018 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-1] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11023 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:02.153690 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-6jfgj, ReplicaSet controller will avoid syncing I0125 05:12:02.153723 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-6jfgj, job controller will avoid syncing I0125 05:12:02.153750 4678 daemoncontroller.go:332] Pod postgresql-master-1-6jfgj updated. I0125 05:12:02.153782 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-6jfgj, daemon set controller will avoid syncing I0125 05:12:02.153805 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-6jfgj" I0125 05:12:02.153823 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-6jfgj, PodDisruptionBudget controller will avoid syncing. 
I0125 05:12:02.153828 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-6jfgj" I0125 05:12:02.153917 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-6jfgj, StatefulSet controller will avoid syncing I0125 05:12:02.154245 4678 audit.go:45] 2017-01-25T05:12:02.154232046-05:00 AUDIT: id="0dfea9c8-7815-450b-aa74-370763f2b726" response="200" I0125 05:12:02.154186 4678 status_manager.go:425] Status for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Pending Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql-master]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc4283337a0 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting:0xc428333780 Running: Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID: ContainerID:}]} version:1 podName:postgresql-master-1-6jfgj podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:02.154322 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (56.702025ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:02.154659 4678 config.go:281] Setting pods for source api I0125 05:12:02.154735 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:12:02.156054 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.158735 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES 
-m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp 
-m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:02.158772 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:02.173272 4678 audit.go:125] 2017-01-25T05:12:02.173191745-05:00 AUDIT: id="371f7ea2-f2a6-4f23-b4cd-24feb9d5fce8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:12:02.173876 4678 audit.go:125] 2017-01-25T05:12:02.173845408-05:00 AUDIT: id="dcd0f5b6-9da4-42f6-9642-7e0b78ad274b" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:02.177553 4678 audit.go:45] 2017-01-25T05:12:02.177536717-05:00 AUDIT: id="371f7ea2-f2a6-4f23-b4cd-24feb9d5fce8" response="200" I0125 05:12:02.177813 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (22.630385ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.178502 4678 audit.go:125] 2017-01-25T05:12:02.178459073-05:00 AUDIT: id="6f22db91-68fc-4d8f-bc76-bf0faa89f7d5" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:12:02.178975 4678 audit.go:45] 2017-01-25T05:12:02.178963062-05:00 AUDIT: id="dcd0f5b6-9da4-42f6-9642-7e0b78ad274b" response="200" I0125 05:12:02.179044 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (23.311144ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:02.182953 4678 audit.go:45] 
2017-01-25T05:12:02.182935887-05:00 AUDIT: id="6f22db91-68fc-4d8f-bc76-bf0faa89f7d5" response="200" I0125 05:12:02.184070 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (25.329582ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50088] I0125 05:12:02.184672 4678 audit.go:125] 2017-01-25T05:12:02.18462414-05:00 AUDIT: id="e2423ebb-7876-493d-9269-773879b7835e" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim" I0125 05:12:02.185994 4678 audit.go:45] 2017-01-25T05:12:02.185976515-05:00 AUDIT: id="e2423ebb-7876-493d-9269-773879b7835e" response="200" I0125 05:12:02.186085 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim: (1.710087ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:02.186905 4678 audit.go:125] 2017-01-25T05:12:02.186876252-05:00 AUDIT: id="8aef89b3-4b13-4774-819b-0c30c4ccf3e8" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:02.188323 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (94.340045ms) I0125 05:12:02.188736 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 0->1 (need 1), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 2->2 I0125 05:12:02.189883 4678 audit.go:45] 2017-01-25T05:12:02.189868073-05:00 AUDIT: id="8aef89b3-4b13-4774-819b-0c30c4ccf3e8" response="200" I0125 05:12:02.189975 4678 panics.go:76] GET /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (3.312351ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:02.191052 4678 audit.go:125] 2017-01-25T05:12:02.191010751-05:00 AUDIT: id="0753ebf2-d052-4383-8622-f2e2e7b325a5" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?fieldSelector=metadata.name%3Dpostgresql-master-1&resourceVersion=11016" I0125 05:12:02.191545 4678 audit.go:45] 2017-01-25T05:12:02.191531425-05:00 AUDIT: id="0753ebf2-d052-4383-8622-f2e2e7b325a5" response="200" I0125 05:12:02.192834 4678 audit.go:125] 2017-01-25T05:12:02.192798763-05:00 AUDIT: id="fbbeadf2-823d-4ecc-804b-2bd0aa93b8ac" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:02.193044 4678 audit.go:125] 2017-01-25T05:12:02.19301232-05:00 AUDIT: id="bbffa841-5827-4f03-a0a8-9be87c380d3c" ip="172.18.7.222" method="PUT" 
user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:12:02.193742 4678 audit.go:45] 2017-01-25T05:12:02.193728043-05:00 AUDIT: id="fbbeadf2-823d-4ecc-804b-2bd0aa93b8ac" response="200" I0125 05:12:02.193805 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (3.922345ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:02.194070 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:12:02.196802 4678 audit.go:45] 2017-01-25T05:12:02.196787593-05:00 AUDIT: id="bbffa841-5827-4f03-a0a8-9be87c380d3c" response="200" I0125 05:12:02.197013 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (6.585456ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.197431 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (55.404611ms) I0125 05:12:02.197544 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 0->1 (need 1), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 2->2 I0125 05:12:02.197864 4678 factory.go:154] Replication controller "postgresql-master-1" updated. I0125 05:12:02.197910 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. 
Desired pod count change: 1->1 I0125 05:12:02.197919 4678 replication_controller.go:338] Observed updated replica count for rc: postgresql-master-1, 0->1 I0125 05:12:02.198881 4678 audit.go:125] 2017-01-25T05:12:02.198843181-05:00 AUDIT: id="bb5ec827-c07c-4239-af9b-7c8585db409b" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:02.199541 4678 audit.go:45] 2017-01-25T05:12:02.199527697-05:00 AUDIT: id="bb5ec827-c07c-4239-af9b-7c8585db409b" response="200" I0125 05:12:02.199600 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (4.038329ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:02.199822 4678 audit.go:125] 2017-01-25T05:12:02.199790207-05:00 AUDIT: id="d2db8ec0-787c-4c4a-a219-4d9c8ef86707" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:12:02.199849 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (11.466067ms) I0125 05:12:02.201275 4678 audit.go:125] 2017-01-25T05:12:02.201240461-05:00 AUDIT: id="35da2bf6-4a0d-448c-96a0-beabda49d0f5" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:12:02.201801 4678 audit.go:45] 2017-01-25T05:12:02.201787272-05:00 AUDIT: id="35da2bf6-4a0d-448c-96a0-beabda49d0f5" response="409" I0125 05:12:02.201849 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (2.94881ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.202407 4678 audit.go:45] 2017-01-25T05:12:02.202393999-05:00 AUDIT: id="d2db8ec0-787c-4c4a-a219-4d9c8ef86707" response="200" I0125 05:12:02.202533 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (2.986633ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:02.202871 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 2) I0125 05:12:02.203652 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:12:02.208437 4678 proxier.go:797] syncProxyRules took 599.669951ms I0125 05:12:02.208460 4678 proxier.go:431] OnServiceUpdate took 599.727967ms for 4 services I0125 05:12:02.208490 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:12:02.208620 4678 proxier.go:804] Syncing iptables rules I0125 05:12:02.208631 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:02.224026 4678 healthcheck.go:86] 
LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:02.224139 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:02.224157 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:12:02.224174 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:02.224183 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:02.224221 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:02.224238 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:02.225120 4678 generic.go:342] PLEG: Write status for postgresql-helper-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-helper-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.5", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42a5a9960), (*container.ContainerStatus)(0xc42620f260)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:02.225734 4678 audit.go:125] 2017-01-25T05:12:02.225687589-05:00 AUDIT: id="0ca6bea2-fa46-4985-a5fa-3c002e1d72cc" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:12:02.226386 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b"} I0125 05:12:02.227777 4678 audit.go:45] 2017-01-25T05:12:02.227761907-05:00 AUDIT: id="0ca6bea2-fa46-4985-a5fa-3c002e1d72cc" response="200" I0125 05:12:02.228151 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (25.760776ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.228621 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 1->1 (need 1), fullyLabeledReplicas 1->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 2->2 I0125 05:12:02.231726 4678 audit.go:125] 2017-01-25T05:12:02.231683829-05:00 AUDIT: id="4c07cb07-8403-46a4-abf6-08f4ddf65a44" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:12:02.235120 4678 audit.go:45] 2017-01-25T05:12:02.235101523-05:00 AUDIT: id="4c07cb07-8403-46a4-abf6-08f4ddf65a44" response="200" I0125 
05:12:02.235443 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?fieldSelector=metadata.name%3Dpostgresql-master-1&resourceVersion=11016: (46.808922ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50088] I0125 05:12:02.237270 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (7.983008ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.238015 4678 factory.go:154] Replication controller "postgresql-master-1" updated. I0125 05:12:02.238714 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (41.233973ms) I0125 05:12:02.238818 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (58.598µs) I0125 05:12:02.238993 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. Desired pod count change: 1->1 I0125 05:12:02.239048 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (30.699µs) I0125 05:12:02.240648 4678 audit.go:125] 2017-01-25T05:12:02.240605202-05:00 AUDIT: id="6f024981-47cc-4c40-acc0-387e92dadbbc" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:12:02.241731 4678 audit.go:45] 2017-01-25T05:12:02.241717119-05:00 AUDIT: id="6f024981-47cc-4c40-acc0-387e92dadbbc" response="200" I0125 05:12:02.242141 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (6.481313ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50088] I0125 05:12:02.244164 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:02.282651 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:02.299651 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/secret/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:12:02.299746 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/host-path/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" (spec.Name: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000") pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:12:02.303791 4678 audit.go:125] 2017-01-25T05:12:02.303729058-05:00 AUDIT: id="de7675dd-17b4-4a79-aa68-43fd9505dcb1" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-1%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master&resourceVersion=0" I0125 05:12:02.304382 4678 audit.go:45] 2017-01-25T05:12:02.304369327-05:00 AUDIT: id="de7675dd-17b4-4a79-aa68-43fd9505dcb1" response="200" I0125 05:12:02.304696 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-1%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master&resourceVersion=0: (4.070417ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50088] I0125 05:12:02.318427 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:02.340150 4678 audit.go:125] 2017-01-25T05:12:02.340062606-05:00 AUDIT: id="c80791c0-cd2c-4843-be0d-ab42a6e57386" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-1%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master&resourceVersion=11023&timeoutSeconds=571" I0125 05:12:02.340935 4678 audit.go:45] 2017-01-25T05:12:02.34092215-05:00 AUDIT: id="c80791c0-cd2c-4843-be0d-ab42a6e57386" response="200" I0125 05:12:02.353243 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:02.390701 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:02.407690 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/host-path/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" (spec.Name: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000") to pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:02.407859 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/secret/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") to pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:12:02.407940 4678 secret.go:179] Setting up volume default-token-0g2nw for pod b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:02.408465 4678 empty_dir.go:248] pod b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094: mounting tmpfs for volume wrapped_default-token-0g2nw I0125 05:12:02.408481 4678 mount_linux.go:112] Mounting tmpfs /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw tmpfs [] with command: "mount" I0125 05:12:02.408489 4678 mount_linux.go:115] Mounting cmd (mount) with arguments ([-t tmpfs tmpfs /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw]) I0125 05:12:02.431489 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/host-path/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" (spec.Name: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000") pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:02.438793 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:02.471402 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:02.486007 4678 audit.go:125] 2017-01-25T05:12:02.485939838-05:00 AUDIT: id="19f05266-8a2b-41d0-9055-2418ba7d30b0" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:12:02.488047 4678 audit.go:45] 2017-01-25T05:12:02.488028227-05:00 AUDIT: id="19f05266-8a2b-41d0-9055-2418ba7d30b0" response="200" I0125 05:12:02.488376 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (2.807016ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:02.488729 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:12:02.488890 4678 atomic_writer.go:145] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj volume default-token-0g2nw: write required for target directory /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:02.489392 4678 atomic_writer.go:160] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj volume default-token-0g2nw: performed write of new data to ts data directory: /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw/..1981_25_01_05_12_02.338739562 I0125 05:12:02.489779 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094"). 
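
The secret.go / atomic_writer.go lines above show the secret volume being mounted as tmpfs and then populated via a timestamped data directory. The idea is that the payload is written into a fresh "..<timestamp>" directory and a "..data" symlink is swapped to it with an atomic rename, so a consumer reading <volume>/<key> never sees a half-written update. A minimal sketch of that swap, assuming illustrative paths and a simplified payload type, not the kubelet's own atomic_writer API:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"time"
)

// atomicWrite writes payload files into a new timestamped directory, then
// atomically repoints the "..data" symlink at it via rename(2).
func atomicWrite(volumeDir string, payload map[string][]byte) error {
	tsDir := filepath.Join(volumeDir, ".."+time.Now().Format("2006_01_02_15_04_05.000000000"))
	if err := os.MkdirAll(tsDir, 0755); err != nil {
		return err
	}
	for name, data := range payload {
		if err := os.WriteFile(filepath.Join(tsDir, name), data, 0644); err != nil {
			return err
		}
	}
	// Point a temporary symlink at the new directory, then rename it over
	// "..data"; the rename is atomic, so readers switch in a single step.
	tmpLink := filepath.Join(volumeDir, "..data_tmp")
	dataLink := filepath.Join(volumeDir, "..data")
	os.Remove(tmpLink)
	if err := os.Symlink(filepath.Base(tsDir), tmpLink); err != nil {
		return err
	}
	return os.Rename(tmpLink, dataLink)
}

func main() {
	dir, _ := os.MkdirTemp("", "secret-vol")
	fmt.Println(atomicWrite(dir, map[string][]byte{"token": []byte("example")}))
}
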
I0125 05:12:02.498632 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:02.528777 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment 
default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:02.528810 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:02.558489 4678 proxier.go:797] syncProxyRules took 349.861581ms I0125 05:12:02.558523 4678 proxier.go:566] OnEndpointsUpdate took 349.96783ms for 6 endpoints I0125 05:12:02.558582 4678 proxier.go:381] Received update notice: [] I0125 05:12:02.558631 4678 proxier.go:804] Syncing iptables rules I0125 05:12:02.558645 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:02.600305 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:02.613844 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc436fcd4a0 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: 
Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/04ad5900 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc431791680 NetworkSettings:0xc4215b6300} I0125 05:12:02.628954 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:02.647757 4678 manager.go:898] Added container: "/system.slice/docker-57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d.scope" (aliases: [k8s_deployment.440ed38e_postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094_ce4eccb1 57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d], namespace: "docker") I0125 05:12:02.647989 4678 handler.go:325] Added event &{/system.slice/docker-57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d.scope 2017-01-25 05:12:01.440872841 -0500 EST containerCreation {}} I0125 05:12:02.648121 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b63d7ff7\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:12:02.648131 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b63d7ff7\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:02.648156 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b63d7ff7\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. 
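
The "Restoring iptables rules: *filter ... *nat ... COMMIT" dump followed by "running iptables-restore [--noflush --counters]" above is the proxier building the whole rule set as text in memory and piping it to iptables-restore on stdin; --noflush leaves chains it does not mention untouched and --counters preserves packet/byte counters. A minimal sketch of that step, with the rule text trimmed to a placeholder:

package main

import (
	"bytes"
	"fmt"
	"os/exec"
)

// restoreRules feeds an iptables-save-formatted rule set to iptables-restore
// without flushing unrelated chains.
func restoreRules(rules []byte) error {
	cmd := exec.Command("iptables-restore", "--noflush", "--counters")
	cmd.Stdin = bytes.NewReader(rules)
	if out, err := cmd.CombinedOutput(); err != nil {
		return fmt.Errorf("iptables-restore failed: %v: %s", err, out)
	}
	return nil
}

func main() {
	rules := []byte("*nat\n:KUBE-SERVICES - [0:0]\nCOMMIT\n")
	fmt.Println(restoreRules(rules))
}
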
I0125 05:12:02.648171 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b63d7ff7\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:02.648218 4678 container.go:407] Start housekeeping for container "/system.slice/docker-57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d.scope" I0125 05:12:02.682396 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:02.715837 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:02.733150 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.734011 4678 audit.go:125] 2017-01-25T05:12:02.733944945-05:00 AUDIT: id="edd6acfe-6892-4c38-b0d4-8f9373dee665" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:12:02.735737 4678 audit.go:45] 2017-01-25T05:12:02.735721149-05:00 AUDIT: id="edd6acfe-6892-4c38-b0d4-8f9373dee665" response="200" I0125 05:12:02.735967 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (2.309175ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:02.736278 4678 docker_manager.go:1947] Need to restart pod infra container for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" because it is not found I0125 05:12:02.736377 4678 docker_manager.go:1992] Container {Name:postgresql-master Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 Command:[] Args:[run-postgresql-master] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_MASTER_USER Value:master ValueFrom:} {Name:POSTGRESQL_MASTER_PASSWORD Value:qcoktIqkwDX8 ValueFrom:} {Name:POSTGRESQL_USER Value:user ValueFrom:} {Name:POSTGRESQL_PASSWORD Value:IbyV1wgYrrMd ValueFrom:} {Name:POSTGRESQL_DATABASE Value:userdb ValueFrom:} {Name:POSTGRESQL_ADMIN_PASSWORD Value:X5NgRSrwacHP ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:postgresql-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc4269cff50 ReadinessProbe:0xc4269cff80 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc4269cffb0 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it. 
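
The docker_manager lines above ("Need to restart pod infra container ... because it is not found", "... is dead, but RestartPolicy says that we should restart it") reflect the kubelet's restart decision: a terminated container is restarted under policy Always, under OnFailure only when it exited non-zero, and never under Never. A minimal sketch of that decision with simplified stand-in types, not the kubelet's:

package main

import "fmt"

type RestartPolicy string

const (
	Always    RestartPolicy = "Always"
	OnFailure RestartPolicy = "OnFailure"
	Never     RestartPolicy = "Never"
)

type ContainerStatus struct {
	Running  bool
	ExitCode int
}

// shouldRestart mirrors the policy check behind the "RestartPolicy says that
// we should restart it" message.
func shouldRestart(policy RestartPolicy, status ContainerStatus) bool {
	if status.Running {
		return false
	}
	switch policy {
	case Always:
		return true
	case OnFailure:
		return status.ExitCode != 0
	default:
		return false
	}
}

func main() {
	// The postgresql-master pod uses the default policy Always, so its dead
	// container is always scheduled for a fresh start.
	fmt.Println(shouldRestart(Always, ContainerStatus{Running: false, ExitCode: 0}))
}
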
I0125 05:12:02.736423 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:true InfraChanged:false InfraContainerId: InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[0:Container {Name:postgresql-master Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 Command:[] Args:[run-postgresql-master] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_MASTER_USER Value:master ValueFrom:} {Name:POSTGRESQL_MASTER_PASSWORD Value:qcoktIqkwDX8 ValueFrom:} {Name:POSTGRESQL_USER Value:user ValueFrom:} {Name:POSTGRESQL_PASSWORD Value:IbyV1wgYrrMd ValueFrom:} {Name:POSTGRESQL_DATABASE Value:userdb ValueFrom:} {Name:POSTGRESQL_ADMIN_PASSWORD Value:X5NgRSrwacHP ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:postgresql-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc4269cff50 ReadinessProbe:0xc4269cff80 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc4269cffb0 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it.] ContainersToKeep:map[]} I0125 05:12:02.736461 4678 docker_manager.go:2095] Killing Infra Container for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)", will start new one I0125 05:12:02.736482 4678 docker_manager.go:2153] Creating pod infra container for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.743818 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:02.771384 4678 audit.go:125] 2017-01-25T05:12:02.771329451-05:00 AUDIT: id="07c3729c-a421-4506-aa67-6218a97f8f4b" ip="172.17.0.5" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:12:02.772756 4678 audit.go:45] 2017-01-25T05:12:02.772739827-05:00 AUDIT: id="07c3729c-a421-4506-aa67-6218a97f8f4b" response="200" I0125 05:12:02.773264 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (4.686644ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.5:44926] I0125 05:12:02.777907 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:02.793491 4678 audit.go:125] 2017-01-25T05:12:02.793409223-05:00 AUDIT: id="63b2498c-8c0d-43cd-9e02-43ed5f652bbb" ip="172.17.0.5" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-helper" I0125 05:12:02.795770 4678 audit.go:45] 2017-01-25T05:12:02.795751216-05:00 AUDIT: 
id="63b2498c-8c0d-43cd-9e02-43ed5f652bbb" response="200" I0125 05:12:02.796356 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-helper: (20.098214ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.5:44926] I0125 05:12:02.801299 4678 audit.go:125] 2017-01-25T05:12:02.801258969-05:00 AUDIT: id="9d323226-8abd-4a76-9ee7-2574d2e967ad" ip="172.17.0.5" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:12:02.802493 4678 audit.go:45] 2017-01-25T05:12:02.802479933-05:00 AUDIT: id="9d323226-8abd-4a76-9ee7-2574d2e967ad" response="200" I0125 05:12:02.802883 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (3.812945ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.5:44926] I0125 05:12:02.808586 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:02.828586 4678 audit.go:125] 2017-01-25T05:12:02.828514863-05:00 AUDIT: id="7684ac20-8300-4e08-b5db-028c627c95ae" ip="172.17.0.5" method="PUT" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:12:02.833553 4678 audit.go:45] 2017-01-25T05:12:02.833530746-05:00 AUDIT: id="7684ac20-8300-4e08-b5db-028c627c95ae" response="200" I0125 05:12:02.835732 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (12.025124ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.5:44926] I0125 05:12:02.836349 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. Desired pod count change: 0->1 I0125 05:12:02.836436 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 either never recorded expectations, or the ttl expired. I0125 05:12:02.836473 4678 controller_utils.go:175] Setting expectations &controller.ControlleeExpectations{add:1, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1", timestamp:time.Time{sec:63620935922, nsec:836470323, loc:(*time.Location)(0xa2479e0)}} I0125 05:12:02.836497 4678 replication_controller.go:541] Too few "extended-test-postgresql-replication-1-34bbd-xd4g8"/"postgresql-helper-1" replicas, need 1, creating 1 I0125 05:12:02.837887 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. 
I0125 05:12:02.840889 4678 audit.go:125] 2017-01-25T05:12:02.84085035-05:00 AUDIT: id="27578dff-eec0-41a0-88ab-864a0585f526" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:12:02.844864 4678 audit.go:45] 2017-01-25T05:12:02.844847608-05:00 AUDIT: id="27578dff-eec0-41a0-88ab-864a0585f526" response="200" I0125 05:12:02.845015 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (4.396247ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:02.845340 4678 audit.go:125] 2017-01-25T05:12:02.845297879-05:00 AUDIT: id="60d45278-595f-47f8-b06c-cb90f3d6c041" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:12:02.845442 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 2) I0125 05:12:02.846233 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:12:02.846510 4678 audit.go:125] 2017-01-25T05:12:02.846487119-05:00 AUDIT: id="9ce3493a-b647-4a74-aa8c-1e10d877eba5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/images/sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5" I0125 05:12:02.846950 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:02.858983 4678 audit.go:45] 2017-01-25T05:12:02.858961485-05:00 AUDIT: id="9ce3493a-b647-4a74-aa8c-1e10d877eba5" response="200" I0125 05:12:02.860082 4678 panics.go:76] GET /oapi/v1/images/sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5: (13.791808ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:02.861523 4678 admission.go:77] getting security context constraints for pod (generate: postgresql-helper-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{system:serviceaccount:openshift-infra:replication-controller cab49cde-e2d9-11e6-a4b0-0e6a5cbf0094 [system:serviceaccounts system:serviceaccounts:openshift-infra system:authenticated] map[]} I0125 05:12:02.861560 4678 admission.go:88] getting security context constraints for pod (generate: postgresql-helper-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:02.863543 4678 audit.go:125] 2017-01-25T05:12:02.863503781-05:00 AUDIT: id="30188089-a5ad-4bf3-a719-7c93cfb241aa" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:02.866764 4678 audit.go:45] 2017-01-25T05:12:02.866746396-05:00 AUDIT: id="30188089-a5ad-4bf3-a719-7c93cfb241aa" response="200" I0125 05:12:02.866915 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (3.672965ms) 200 [[openshift/v1.5.2+43a9be4 
(linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:02.867644 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:02.867668 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:02.867684 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:02.867723 4678 admission.go:149] validating pod (generate: postgresql-helper-1-) against providers restricted I0125 05:12:02.867816 4678 admission.go:116] pod (generate: postgresql-helper-1-) validated against provider restricted I0125 05:12:02.869427 4678 audit.go:125] 2017-01-25T05:12:02.869321939-05:00 AUDIT: id="6b3c7f45-b64b-4651-9b16-c6294b5b6fda" ip="172.17.0.5" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:12:02.871950 4678 audit.go:45] 2017-01-25T05:12:02.871934643-05:00 AUDIT: id="60d45278-595f-47f8-b06c-cb90f3d6c041" response="201" I0125 05:12:02.872285 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (30.16376ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.873572 4678 audit.go:45] 2017-01-25T05:12:02.873540955-05:00 AUDIT: id="6b3c7f45-b64b-4651-9b16-c6294b5b6fda" response="200" I0125 05:12:02.874401 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (12.280039ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.5:44926] I0125 05:12:02.875189 4678 factory.go:488] About to try and schedule pod postgresql-helper-1-cpv6d I0125 05:12:02.875213 4678 scheduler.go:93] Attempting to schedule pod: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d I0125 05:12:02.875251 4678 disruption.go:314] addPod called on pod "postgresql-helper-1-cpv6d" I0125 05:12:02.875297 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-cpv6d, PodDisruptionBudget controller will avoid syncing. 
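
The matcher.go lines above ("got preallocated values for min: 1000640000, max: 1000649999 for uid range ...") come from the restricted SCC reading per-namespace annotations; assuming the usual openshift.io/sa.scc.uid-range form "<start>/<size>", the min/max in the log fall out of simple arithmetic. A minimal parsing sketch, not the admission plugin itself:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseUIDRange turns "1000640000/10000" into the inclusive [min, max] range
// reported by the SCC matcher.
func parseUIDRange(ann string) (min, max int64, err error) {
	parts := strings.SplitN(ann, "/", 2)
	if len(parts) != 2 {
		return 0, 0, fmt.Errorf("expected <start>/<size>, got %q", ann)
	}
	start, err := strconv.ParseInt(parts[0], 10, 64)
	if err != nil {
		return 0, 0, err
	}
	size, err := strconv.ParseInt(parts[1], 10, 64)
	if err != nil {
		return 0, 0, err
	}
	return start, start + size - 1, nil
}

func main() {
	min, max, _ := parseUIDRange("1000640000/10000")
	fmt.Printf("got preallocated values for min: %d, max: %d\n", min, max)
}
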
I0125 05:12:02.875309 4678 disruption.go:317] No matching pdb for pod "postgresql-helper-1-cpv6d" I0125 05:12:02.875561 4678 controller_utils.go:192] Lowered expectations &controller.ControlleeExpectations{add:0, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1", timestamp:time.Time{sec:63620935922, nsec:836470323, loc:(*time.Location)(0xa2479e0)}} I0125 05:12:02.875691 4678 replica_set.go:288] Pod postgresql-helper-1-cpv6d created: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-helper-1-cpv6d", GenerateName:"postgresql-helper-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d", UID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11030", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:868420924, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"app":"postgresql-ephemeral", "deployment":"postgresql-helper-1", "deploymentconfig":"postgresql-helper", "name":"postgresql-helper"}, Annotations:map[string]string{"openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/deployment-config.latest-version":"1", "openshift.io/scc":"restricted", "openshift.io/deployment-config.name":"postgresql-helper", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-helper-1\",\"uid\":\"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11028\"}}\n", "openshift.io/deployment.name":"postgresql-helper-1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-helper-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc4305c64a0), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42fdfb230), NFS:(*api.NFSVolumeSource)(nil), 
ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql", Image:"centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_USER", Value:"", ValueFrom:(*api.EnvVarSource)(0xc42e60d2e0)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"", ValueFrom:(*api.EnvVarSource)(0xc42e60d3a0)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"sampledb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList{"memory":resource.Quantity{i:resource.int64Amount{value:536870912, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"", Format:"BinarySI"}}, Requests:api.ResourceList{"memory":resource.Quantity{i:resource.int64Amount{value:536870912, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"", Format:"BinarySI"}}}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-helper-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc42fdfb320), ReadinessProbe:(*api.Probe)(0xc42fdfb350), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42fdfb380), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc4305c6580), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"", SecurityContext:(*api.PodSecurityContext)(0xc43578eb00), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Pending", Conditions:[]api.PodCondition(nil), Message:"", Reason:"", HostIP:"", PodIP:"", StartTime:(*unversioned.Time)(nil), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus(nil)}}. 
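
The pod dump above carries GenerateName:"postgresql-helper-1-" and an empty Name on creation; the API server fills in the name by appending a short random suffix, which is how "postgresql-helper-1-cpv6d" (and earlier "postgresql-master-1-6jfgj") gets its name. A sketch of that idea; the alphabet below only approximates the lowercase-consonant/digit style of the generated suffixes and is not the apiserver's exact generator:

package main

import (
	"fmt"
	"math/rand"
)

const alphabet = "bcdfghjklmnpqrstvwxz2456789"

// generateName appends n random characters to the base, in the spirit of the
// apiserver's name generation for objects created with generateName.
func generateName(base string, n int) string {
	suffix := make([]byte, n)
	for i := range suffix {
		suffix[i] = alphabet[rand.Intn(len(alphabet))]
	}
	return base + string(suffix)
}

func main() {
	fmt.Println(generateName("postgresql-helper-1-", 5))
}
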
I0125 05:12:02.876122 4678 pet_set.go:160] Pod postgresql-helper-1-cpv6d created, labels: map[deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1] I0125 05:12:02.876158 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-cpv6d, StatefulSet controller will avoid syncing I0125 05:12:02.876237 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-cpv6d, ReplicaSet controller will avoid syncing I0125 05:12:02.875188 4678 controller_utils.go:512] Controller postgresql-helper-1 created pod postgresql-helper-1-cpv6d I0125 05:12:02.876339 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1, replicas 0->0 (need 1), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 1->2 I0125 05:12:02.876966 4678 selector_spreading.go:114] skipping pending-deleted pod: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc I0125 05:12:02.877051 4678 factory.go:648] Attempting to bind postgresql-helper-1-cpv6d to 172.18.7.222 I0125 05:12:02.885660 4678 event.go:217] Event(api.ObjectReference{Kind:"ReplicationController", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-helper-1", UID:"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11028", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: postgresql-helper-1-cpv6d I0125 05:12:02.891665 4678 audit.go:125] 2017-01-25T05:12:02.891620131-05:00 AUDIT: id="5dace493-8d86-47b1-a0eb-d0a98096892b" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings" I0125 05:12:02.898894 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-cpv6d, job controller will avoid syncing I0125 05:12:02.898921 4678 daemoncontroller.go:309] Pod postgresql-helper-1-cpv6d added. 
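
"Attempting to bind postgresql-helper-1-cpv6d to 172.18.7.222" followed by the POST to /api/v1/namespaces/.../bindings (201 a few lines below) is the scheduler committing its decision: it posts a Binding object whose target names the chosen node. A minimal sketch of that call, with authentication, TLS configuration, and error handling stripped down and the endpoint used only as a placeholder:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

type objectRef struct {
	Kind string `json:"kind"`
	Name string `json:"name"`
}

type binding struct {
	APIVersion string            `json:"apiVersion"`
	Kind       string            `json:"kind"`
	Metadata   map[string]string `json:"metadata"`
	Target     objectRef         `json:"target"`
}

// bindPod posts a Binding for the pod to the bindings endpoint seen in the
// audit log; the real client would attach credentials and TLS config.
func bindPod(apiServer, namespace, pod, node string) error {
	b := binding{
		APIVersion: "v1",
		Kind:       "Binding",
		Metadata:   map[string]string{"name": pod},
		Target:     objectRef{Kind: "Node", Name: node},
	}
	body, _ := json.Marshal(b)
	url := fmt.Sprintf("%s/api/v1/namespaces/%s/bindings", apiServer, namespace)
	resp, err := http.Post(url, "application/json", bytes.NewReader(body))
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	fmt.Println("bindings POST status:", resp.Status) // the log shows 201
	return nil
}

func main() {
	err := bindPod("https://172.18.7.222:8443", // placeholder endpoint
		"extended-test-postgresql-replication-1-34bbd-xd4g8",
		"postgresql-helper-1-cpv6d", "172.18.7.222")
	if err != nil {
		fmt.Println("bind failed:", err)
	}
}
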
I0125 05:12:02.898962 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-cpv6d, daemon set controller will avoid syncing I0125 05:12:02.901090 4678 audit.go:45] 2017-01-25T05:12:02.901068039-05:00 AUDIT: id="5dace493-8d86-47b1-a0eb-d0a98096892b" response="201" I0125 05:12:02.901153 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings: (9.714673ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:02.901917 4678 audit.go:125] 2017-01-25T05:12:02.901881631-05:00 AUDIT: id="ef16d75d-15ad-4d2c-a901-78f03059ecd8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:02.902910 4678 config.go:281] Setting pods for source api I0125 05:12:02.904049 4678 config.go:397] Receiving a new pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.904123 4678 kubelet.go:1781] SyncLoop (ADD, "api"): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.904546 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.905165 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:02.905686 4678 replication_controller.go:378] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11030 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper] Annotations:map[openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11031 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral] 
Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:02.905809 4678 replica_set.go:320] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11030 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11031 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} ] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:02.905893 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-cpv6d, ReplicaSet controller will avoid syncing I0125 05:12:02.905915 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-cpv6d, job controller will avoid syncing I0125 05:12:02.905929 4678 daemoncontroller.go:332] Pod postgresql-helper-1-cpv6d updated. 
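The repeated "No ReplicaSets found for pod ..., ReplicaSet controller will avoid syncing" messages in these records come from each controller listing its objects and testing whether any selector matches the pod's labels. The sketch below shows that check with k8s.io/apimachinery's labels package; the selector shown is a plausible stand-in for the postgresql-helper-1 RC, not its exact stored selector.

```go
// Sketch only: the selector-vs-pod-labels test behind the
// "No <kind> found for pod ..." messages above. The helper and the example
// selector are illustrative; the pod labels are taken from the log.
package selectorsketch

import "k8s.io/apimachinery/pkg/labels"

func ownsPod(selector, podLabels map[string]string) bool {
	// Guard against an empty set, which SelectorFromSet treats as match-everything.
	if len(selector) == 0 {
		return false
	}
	return labels.SelectorFromSet(labels.Set(selector)).Matches(labels.Set(podLabels))
}

var matched = ownsPod(
	map[string]string{"deployment": "postgresql-helper-1", "deploymentconfig": "postgresql-helper"},
	map[string]string{
		"app":              "postgresql-ephemeral",
		"deployment":       "postgresql-helper-1",
		"deploymentconfig": "postgresql-helper",
		"name":             "postgresql-helper",
	},
) // true: an RC selector like this claims the pod, while no ReplicaSet selector does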
I0125 05:12:02.905953 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-cpv6d, daemon set controller will avoid syncing I0125 05:12:02.905969 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-cpv6d" I0125 05:12:02.905979 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-cpv6d, PodDisruptionBudget controller will avoid syncing. I0125 05:12:02.905998 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-cpv6d" I0125 05:12:02.906064 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-cpv6d, StatefulSet controller will avoid syncing I0125 05:12:02.906767 4678 audit.go:125] 2017-01-25T05:12:02.906738882-05:00 AUDIT: id="718a245e-bfc3-440a-92af-2476aa53e591" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:02.907604 4678 audit.go:45] 2017-01-25T05:12:02.907591611-05:00 AUDIT: id="ef16d75d-15ad-4d2c-a901-78f03059ecd8" response="200" I0125 05:12:02.907663 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (19.987659ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:02.908012 4678 audit.go:125] 2017-01-25T05:12:02.907988476-05:00 AUDIT: id="3d263f82-2a33-427b-aaa9-a10918991ac4" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:12:02.908430 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 0 not ready: 0 I0125 05:12:02.912978 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A 
KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment 
--comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:02.913018 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:02.931934 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/POD podIP: "" creating hosts mount: false I0125 05:12:02.932929 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc4264ae2c0 Mounts:[] Config:0xc43319b7a0 NetworkSettings:0xc43326e200} I0125 05:12:02.938563 4678 audit.go:45] 2017-01-25T05:12:02.938538758-05:00 AUDIT: id="3d263f82-2a33-427b-aaa9-a10918991ac4" response="200" I0125 05:12:02.938918 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (31.090056ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:02.940672 4678 audit.go:125] 2017-01-25T05:12:02.940601661-05:00 AUDIT: id="ea4ba254-03b8-4803-b5b6-9848f57fe118" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status" I0125 05:12:02.940971 4678 audit.go:125] 2017-01-25T05:12:02.940941212-05:00 AUDIT: id="d85965c2-ebe3-42d6-ad9d-3f0dc6bc977e" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:02.942729 4678 audit.go:125] 2017-01-25T05:12:02.942658283-05:00 AUDIT: id="0a49d227-91eb-4ca7-ae8b-66808a88c191" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:02.945097 4678 audit.go:125] 2017-01-25T05:12:02.945065497-05:00 AUDIT: id="1ae21427-065d-4b25-a58e-315931fb9462" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/status" I0125 05:12:02.946303 4678 audit.go:45] 2017-01-25T05:12:02.946270817-05:00 AUDIT: id="d85965c2-ebe3-42d6-ad9d-3f0dc6bc977e" response="200" I0125 05:12:02.946393 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (36.505755ms) 200 
[[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:02.949728 4678 audit.go:45] 2017-01-25T05:12:02.949715064-05:00 AUDIT: id="ea4ba254-03b8-4803-b5b6-9848f57fe118" response="200" I0125 05:12:02.954516 4678 audit.go:125] 2017-01-25T05:12:02.954444199-05:00 AUDIT: id="bff8096e-5ca6-4a91-b724-d46c7681435f" ip="172.17.0.5" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?fieldSelector=metadata.name%3Dpostgresql-helper-1&resourceVersion=11028" I0125 05:12:02.955279 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. I0125 05:12:02.955337 4678 audit.go:45] 2017-01-25T05:12:02.955317693-05:00 AUDIT: id="bff8096e-5ca6-4a91-b724-d46c7681435f" response="200" I0125 05:12:02.955682 4678 audit.go:45] 2017-01-25T05:12:02.955668727-05:00 AUDIT: id="1ae21427-065d-4b25-a58e-315931fb9462" response="200" I0125 05:12:02.955706 4678 audit.go:45] 2017-01-25T05:12:02.955695825-05:00 AUDIT: id="718a245e-bfc3-440a-92af-2476aa53e591" response="201" I0125 05:12:02.955777 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (49.255623ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:02.955790 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/status: (10.952241ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:02.957219 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status: (68.95903ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:02.977673 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (101.775456ms) I0125 05:12:02.979434 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (143.021584ms) I0125 05:12:02.979576 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1, replicas 0->1 (need 1), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 1->2 I0125 05:12:02.981800 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. 
Desired pod count change: 1->1 I0125 05:12:02.983563 4678 status_manager.go:425] Status for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Pending Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc426a5ce80 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql State:{Waiting:0xc426a5ce60 Running: Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ImageID: ContainerID:}]} version:1 podName:postgresql-helper-1-cpv6d podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:02.989471 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:02.989510 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:02.990142 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:02.990160 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:02.991385 4678 worker.go:162] Probe target container not found: postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-slave I0125 05:12:02.996769 4678 audit.go:45] 2017-01-25T05:12:02.99673836-05:00 AUDIT: id="0a49d227-91eb-4ca7-ae8b-66808a88c191" response="201" I0125 05:12:02.996895 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (108.134234ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:03.000663 4678 replication_controller.go:378] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11031 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> 
{Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11035 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:03.001496 4678 replica_set.go:320] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11031 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11035 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral] Annotations:map[openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
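The proxier records a few entries back ("Restoring iptables rules: *filter ... *nat ... COMMIT", followed by "running iptables-restore [--noflush --counters]") show kube-proxy writing the whole service/endpoint table in one shot. The sketch below applies a payload of that shape with os/exec; it is not kube-proxy's own iptables wrapper, just the invocation it logs, and the rules string is a trimmed stand-in.

```go
// Sketch only: piping a kube-proxy style rules payload into
// `iptables-restore --noflush --counters`, the invocation logged above.
// Requires root and iptables on PATH; the rules below are a stand-in that
// mirrors the "has no endpoints" REJECT rule from the log.
package main

import (
	"log"
	"os/exec"
	"strings"
)

func main() {
	rules := `*filter
:KUBE-SERVICES - [0:0]
-A KUBE-SERVICES -m comment --comment "example: service with no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT
COMMIT
`
	cmd := exec.Command("iptables-restore", "--noflush", "--counters")
	cmd.Stdin = strings.NewReader(rules)
	if out, err := cmd.CombinedOutput(); err != nil {
		log.Fatalf("iptables-restore failed: %v: %s", err, out)
	}
}
```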
I0125 05:12:03.001919 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-cpv6d, ReplicaSet controller will avoid syncing I0125 05:12:03.002551 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-cpv6d, job controller will avoid syncing I0125 05:12:03.002957 4678 daemoncontroller.go:332] Pod postgresql-helper-1-cpv6d updated. I0125 05:12:03.003024 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-cpv6d, daemon set controller will avoid syncing I0125 05:12:03.003550 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-cpv6d" I0125 05:12:03.003581 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-cpv6d, PodDisruptionBudget controller will avoid syncing. I0125 05:12:03.003587 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-cpv6d" I0125 05:12:03.004919 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-cpv6d, StatefulSet controller will avoid syncing I0125 05:12:03.006944 4678 worker.go:162] Probe target container not found: postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql I0125 05:12:03.007979 4678 config.go:281] Setting pods for source api I0125 05:12:03.013334 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:03.032277 4678 generic.go:342] PLEG: Write status for postgresql-master-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.4", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc427d3c0e0), (*container.ContainerStatus)(0xc429172b60)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:03.032397 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a"} I0125 05:12:03.032678 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:03.033231 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:03.036228 4678 audit.go:125] 2017-01-25T05:12:03.036164678-05:00 AUDIT: id="cde8853b-ba69-4e99-9a9d-5b3dddd4f36e" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy" I0125 05:12:03.046815 4678 proxier.go:797] syncProxyRules took 488.179305ms I0125 05:12:03.046845 4678 proxier.go:431] OnServiceUpdate took 488.244583ms for 4 services I0125 05:12:03.046913 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:12:03.047402 4678 proxier.go:804] Syncing iptables rules I0125 05:12:03.047418 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 
05:12:03.066783 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:12:03.066935 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:03.066959 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:03.066984 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:03.067010 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:03.067028 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:03.067053 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:03.071376 4678 audit.go:45] 2017-01-25T05:12:03.071342505-05:00 AUDIT: id="cde8853b-ba69-4e99-9a9d-5b3dddd4f36e" response="200" I0125 05:12:03.071602 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy: (36.659587ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:03.094745 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:12:03 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc426562d60 0 [] true false map[] 0xc432e6fd10 } I0125 05:12:03.094823 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:03.094898 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:12:03 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc426562ec0 0 [] true false map[] 0xc432e6fb30 } I0125 05:12:03.094963 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:03.096442 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/empty-dir/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-postgresql-helper-data" (spec.Name: "postgresql-helper-data") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:12:03.096492 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/secret/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:12:03.096692 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:03.105642 4678 audit.go:125] 2017-01-25T05:12:03.105154782-05:00 AUDIT: id="bc686012-f45f-4503-b54b-65b3c17524c0" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy/status" I0125 
05:12:03.106636 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:03.138309 4678 audit.go:125] 2017-01-25T05:12:03.138230372-05:00 AUDIT: id="3c2f28f1-63e2-42ca-a21a-ee6e6bcd7c05" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:03.143093 4678 audit.go:125] 2017-01-25T05:12:03.143022738-05:00 AUDIT: id="034bb9f5-d8fd-407b-927c-427e2a7906f0" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:12:03.148397 4678 audit.go:125] 2017-01-25T05:12:03.14833485-05:00 AUDIT: id="2d129fff-3af0-475b-ad29-e3bbb51285f6" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status" I0125 05:12:03.149292 4678 audit.go:45] 2017-01-25T05:12:03.149274288-05:00 AUDIT: id="bc686012-f45f-4503-b54b-65b3c17524c0" response="200" I0125 05:12:03.149496 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy/status: (44.825225ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:03.152426 4678 audit.go:45] 2017-01-25T05:12:03.152410056-05:00 AUDIT: id="2d129fff-3af0-475b-ad29-e3bbb51285f6" response="409" I0125 05:12:03.152709 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status: (154.143964ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:03.158405 4678 audit.go:45] 2017-01-25T05:12:03.158372544-05:00 AUDIT: id="034bb9f5-d8fd-407b-927c-427e2a7906f0" response="200" I0125 05:12:03.158721 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (16.134987ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:03.160224 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:12:03.161651 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:03.162395 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094"). 
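The prober records a few entries back ("HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz" followed by "Readiness probe ... succeeded" and "Liveness probe ... succeeded") reduce to a GET whose status code decides the result; the kubelet's HTTP prober counts any status in [200, 400) as success. A minimal sketch of that check, with an illustrative timeout:

```go
// Sketch only: the pass/fail rule behind the registry /healthz probe results
// logged above. URL and timeout are illustrative, not kubelet configuration.
package main

import (
	"fmt"
	"log"
	"net/http"
	"time"
)

func probe(url string) (bool, error) {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return false, err
	}
	defer resp.Body.Close()
	// Success for any status in [200, 400), mirroring the kubelet's HTTP prober.
	return resp.StatusCode >= http.StatusOK && resp.StatusCode < http.StatusBadRequest, nil
}

func main() {
	ok, err := probe("http://172.17.0.3:5000/healthz") // the registry probe from the log
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("probe success:", ok)
}
```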
I0125 05:12:03.164046 4678 status_manager.go:425] Status for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935923 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.4 StartTime:0xc432769900 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running:0xc4265702e0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f}]} version:2 podName:postgresql-master-1-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:03.164838 4678 config.go:281] Setting pods for source api I0125 05:12:03.169392 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:03.170842 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:03.194155 4678 audit.go:45] 2017-01-25T05:12:03.19410232-05:00 AUDIT: id="3c2f28f1-63e2-42ca-a21a-ee6e6bcd7c05" response="200" I0125 05:12:03.194514 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (197.092303ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.198051 4678 replication_controller.go:378] Pod postgresql-master-1-deploy updated, objectMeta {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10961 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11037 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:12:03.198446 4678 replication_controller.go:255] No controllers found for pod postgresql-master-1-deploy, replication manager will avoid syncing I0125 05:12:03.198519 4678 replica_set.go:320] Pod postgresql-master-1-deploy updated, objectMeta {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10961 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11037 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:03.198758 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-deploy, ReplicaSet controller will avoid syncing I0125 05:12:03.198980 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-deploy, job controller will avoid syncing I0125 05:12:03.199049 4678 daemoncontroller.go:332] Pod postgresql-master-1-deploy updated. I0125 05:12:03.199165 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-deploy, daemon set controller will avoid syncing I0125 05:12:03.199235 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-deploy" I0125 05:12:03.199347 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:12:03.199356 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-deploy" I0125 05:12:03.200601 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-deploy, StatefulSet controller will avoid syncing I0125 05:12:03.202305 4678 worker.go:162] Probe target container not found: postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-slave I0125 05:12:03.202756 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/empty-dir/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-postgresql-helper-data" (spec.Name: "postgresql-helper-data") to pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:03.202958 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/secret/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") to pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). 
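The next records show the kubelet backing the default-token secret volume with a tmpfs before any data is written into it ("Mounting tmpfs <target> ... with command: mount", arguments "-t tmpfs tmpfs <target>"). A minimal sketch of that mount expressed as the direct mount(2) call; Linux-only, requires root, and the target path is an illustrative stand-in for the pod volume directory.

```go
// Sketch only: the tmpfs mount logged in the next records, i.e. the
// equivalent of `mount -t tmpfs tmpfs <target>` done via mount(2).
package main

import (
	"log"
	"os"
	"syscall"
)

func main() {
	target := "/var/lib/example-pod-volume/kubernetes.io~secret/default-token-example"
	if err := os.MkdirAll(target, 0o755); err != nil {
		log.Fatal(err)
	}
	if err := syscall.Mount("tmpfs", target, "tmpfs", 0, ""); err != nil {
		log.Fatal(err)
	}
}
```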
I0125 05:12:03.203079 4678 secret.go:179] Setting up volume default-token-0g2nw for pod b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:03.203718 4678 empty_dir.go:248] pod b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094: mounting tmpfs for volume wrapped_default-token-0g2nw I0125 05:12:03.203738 4678 mount_linux.go:112] Mounting tmpfs /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw tmpfs [] with command: "mount" I0125 05:12:03.203749 4678 mount_linux.go:115] Mounting cmd (mount) with arguments ([-t tmpfs tmpfs /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw]) I0125 05:12:03.250411 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:03.290979 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 0 not ready: 0 I0125 05:12:03.293514 4678 audit.go:125] 2017-01-25T05:12:03.293469278-05:00 AUDIT: id="7dcc2d7d-8703-4a68-b04c-1398e51f59b7" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:03.294254 4678 audit.go:45] 2017-01-25T05:12:03.294237492-05:00 AUDIT: id="7dcc2d7d-8703-4a68-b04c-1398e51f59b7" response="200" I0125 05:12:03.294748 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.507742ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:03.298641 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:03.347583 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/POD: setting entrypoint "[]" and command "[]" I0125 05:12:03.358094 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:03.359826 4678 audit.go:125] 2017-01-25T05:12:03.359742891-05:00 AUDIT: id="b35d0666-ed6e-42e5-8bdb-2c8ac7263448" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:12:03.360780 4678 audit.go:125] 2017-01-25T05:12:03.360663866-05:00 AUDIT: id="86b86971-8f23-4e0b-8aa7-462863701ebc" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:03.361843 4678 audit.go:125] 2017-01-25T05:12:03.361776634-05:00 AUDIT: id="2e0a874c-0c19-446b-8d32-55de6964c27c" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:12:03.362378 4678 audit.go:45] 2017-01-25T05:12:03.362357492-05:00 AUDIT: 
id="86b86971-8f23-4e0b-8aa7-462863701ebc" response="200" I0125 05:12:03.362677 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (66.271702ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.363508 4678 audit.go:125] 2017-01-25T05:12:03.363443832-05:00 AUDIT: id="cd25f0a8-5538-4d76-95c6-ff63f6247ab9" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:12:03.363765 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (385.984952ms) I0125 05:12:03.365082 4678 audit.go:125] 2017-01-25T05:12:03.3649377-05:00 AUDIT: id="cfbe9e7a-b6ff-4a39-9c61-37a3415b9829" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:12:03.388965 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:03.417878 4678 audit.go:45] 2017-01-25T05:12:03.417841018-05:00 AUDIT: id="2e0a874c-0c19-446b-8d32-55de6964c27c" response="200" I0125 05:12:03.418602 4678 audit.go:45] 2017-01-25T05:12:03.418585918-05:00 AUDIT: id="cd25f0a8-5538-4d76-95c6-ff63f6247ab9" response="200" I0125 05:12:03.419102 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (57.836847ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:03.419437 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (56.372739ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:03.420916 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:03.421006 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:03.421026 4678 docker_manager.go:1999] pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment" exists as a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f I0125 05:12:03.421237 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a:-1 a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f:0]} I0125 05:12:03.421486 4678 secret.go:206] Received secret 
extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:12:03.421636 4678 atomic_writer.go:145] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d volume default-token-0g2nw: write required for target directory /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:03.422145 4678 atomic_writer.go:160] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d volume default-token-0g2nw: performed write of new data to ts data directory: /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw/..1981_25_01_05_12_03.845724609 I0125 05:12:03.422559 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:03.425463 4678 audit.go:45] 2017-01-25T05:12:03.425437206-05:00 AUDIT: id="b35d0666-ed6e-42e5-8bdb-2c8ac7263448" response="200" I0125 05:12:03.425956 4678 audit.go:45] 2017-01-25T05:12:03.425937687-05:00 AUDIT: id="cfbe9e7a-b6ff-4a39-9c61-37a3415b9829" response="200" I0125 05:12:03.439742 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (243.145451ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:03.440177 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (150.753604ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:12:03.442239 4678 audit.go:125] 2017-01-25T05:12:03.442157231-05:00 AUDIT: id="a5a80150-0eb7-4227-8fcb-1b97c70cced8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:03.443355 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. Desired pod count change: 1->1 I0125 05:12:03.443529 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (84.88µs) I0125 05:12:03.443876 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1, replicas 0->1 (need 1), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 2->2 I0125 05:12:03.444520 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 status from Pending to Running (scale: 1) I0125 05:12:03.444806 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:03.466193 4678 factory.go:154] Replication controller "postgresql-master-1" updated. 
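The atomic_writer records above ("write required for target directory ...", "performed write of new data to ts data directory: .../..1981_25_01_05_12_03.845724609") describe the timestamped-directory plus symlink-swap pattern that keeps secret updates atomic for readers. The sketch below is a simplified, hedged reimplementation of that pattern, not the kubelet's atomic_writer itself; paths, payload, and the timestamp layout are illustrative.

```go
// Sketch only: write new payload into a fresh timestamped directory, then
// atomically repoint the ..data symlink so readers never see a partial update.
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
	"time"
)

func atomicWrite(targetDir, name string, data []byte) error {
	tsDir := filepath.Join(targetDir, ".."+time.Now().Format("2006_01_02_15_04_05.000000000"))
	if err := os.MkdirAll(tsDir, 0o755); err != nil {
		return err
	}
	if err := os.WriteFile(filepath.Join(tsDir, name), data, 0o644); err != nil {
		return err
	}
	// Swap the ..data symlink via rename, which replaces the old link atomically.
	tmpLink := filepath.Join(targetDir, "..data_tmp")
	_ = os.Remove(tmpLink)
	if err := os.Symlink(filepath.Base(tsDir), tmpLink); err != nil {
		return err
	}
	if err := os.Rename(tmpLink, filepath.Join(targetDir, "..data")); err != nil {
		return err
	}
	// The user-visible path is a stable symlink through ..data.
	userLink := filepath.Join(targetDir, name)
	if _, err := os.Lstat(userLink); os.IsNotExist(err) {
		return os.Symlink(filepath.Join("..data", name), userLink)
	}
	return nil
}

func main() {
	dir, err := os.MkdirTemp("", "token-vol")
	if err != nil {
		log.Fatal(err)
	}
	if err := atomicWrite(dir, "token", []byte("example")); err != nil {
		log.Fatal(err)
	}
	fmt.Println("wrote", filepath.Join(dir, "token"))
}
```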
I0125 05:12:03.491319 4678 quota.go:156] XFS quota applied: device=/dev/mapper/docker--vg-openshift--xfs--vol--dir, quota=4697620480, fsGroup=1000640000 I0125 05:12:03.491361 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/empty-dir/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-postgresql-helper-data" (spec.Name: "postgresql-helper-data") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:03.491762 4678 audit.go:45] 2017-01-25T05:12:03.491738213-05:00 AUDIT: id="a5a80150-0eb7-4227-8fcb-1b97c70cced8" response="200" I0125 05:12:03.491886 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (105.766965ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.493411 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:03.506588 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 0 not ready: 0 I0125 05:12:03.507312 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:03.512054 4678 audit.go:125] 2017-01-25T05:12:03.512002102-05:00 AUDIT: id="318d7846-a49c-4aa9-aa54-9857b4c3fbcd" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:12:03.513919 4678 audit.go:125] 2017-01-25T05:12:03.513874209-05:00 AUDIT: id="2454dc4f-cb08-4c97-bb67-0eb9deb026a8" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status" I0125 05:12:03.518635 4678 audit.go:45] 2017-01-25T05:12:03.518613842-05:00 AUDIT: id="318d7846-a49c-4aa9-aa54-9857b4c3fbcd" response="200" I0125 05:12:03.519077 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (7.309825ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:03.521053 4678 audit.go:125] 2017-01-25T05:12:03.520999516-05:00 AUDIT: id="4940e64d-10f0-4ccb-b338-b767c7024bc7" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:03.522750 4678 audit.go:45] 2017-01-25T05:12:03.522731517-05:00 AUDIT: id="4940e64d-10f0-4ccb-b338-b767c7024bc7" response="200" I0125 05:12:03.522844 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (14.153379ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.523655 4678 audit.go:45] 2017-01-25T05:12:03.523636419-05:00 AUDIT: id="2454dc4f-cb08-4c97-bb67-0eb9deb026a8" response="200" I0125 
05:12:03.534123 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status: (75.738496ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:03.534653 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:03.569755 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. I0125 05:12:03.571378 4678 reflector.go:273] pkg/controller/endpoint/endpoints_controller.go:160: forcing resync I0125 05:12:03.573595 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?fieldSelector=metadata.name%3Dpostgresql-helper-1&resourceVersion=11028: (686.573131ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.5:44926] I0125 05:12:03.574735 4678 endpoints_controller.go:334] Finished syncing service "default/kubernetes" endpoints. (7.992µs) I0125 05:12:03.576290 4678 docker_manager.go:1947] Need to restart pod infra container for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" because it is not found I0125 05:12:03.576441 4678 docker_manager.go:1992] Container {Name:postgresql Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 Command:[] Args:[] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_USER Value: ValueFrom:0xc426a5c200} {Name:POSTGRESQL_PASSWORD Value: ValueFrom:0xc426a5c240} {Name:POSTGRESQL_DATABASE Value:sampledb ValueFrom:}] Resources:{Limits:map[memory:{i:{value:536870912 scale:0} d:{Dec:} s: Format:BinarySI}] Requests:map[memory:{i:{value:536870912 scale:0} d:{Dec:} s: Format:BinarySI}]} VolumeMounts:[{Name:postgresql-helper-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc435846f90 ReadinessProbe:0xc435846fc0 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc435846ff0 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it. 
I0125 05:12:03.576480 4678 docker_manager.go:2086] Got container changes for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:true InfraChanged:false InfraContainerId: InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[0:Container {Name:postgresql Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 Command:[] Args:[] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_USER Value: ValueFrom:0xc426a5c200} {Name:POSTGRESQL_PASSWORD Value: ValueFrom:0xc426a5c240} {Name:POSTGRESQL_DATABASE Value:sampledb ValueFrom:}] Resources:{Limits:map[memory:{i:{value:536870912 scale:0} d:{Dec:} s: Format:BinarySI}] Requests:map[memory:{i:{value:536870912 scale:0} d:{Dec:} s: Format:BinarySI}]} VolumeMounts:[{Name:postgresql-helper-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc435846f90 ReadinessProbe:0xc435846fc0 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc435846ff0 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it.] ContainersToKeep:map[]} I0125 05:12:03.576536 4678 docker_manager.go:2095] Killing Infra Container for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)", will start new one I0125 05:12:03.576563 4678 docker_manager.go:2153] Creating pod infra container for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:03.577940 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (214.038592ms) I0125 05:12:03.578370 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (598.879535ms) I0125 05:12:03.578443 4678 trace.go:61] Trace "syncReplicationController: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (started 2017-01-25 05:12:02.979484259 -0500 EST): [15.651µs] [15.651µs] ReplicationController restored [18.881µs] [3.23µs] Expectations restored [52.371µs] [33.49µs] manageReplicas done [598.903476ms] [598.851105ms] END I0125 05:12:03.578553 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1, replicas 0->1 (need 1), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 2->2 I0125 05:12:03.579178 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. 
Desired pod count change: 1->1 I0125 05:12:03.579193 4678 replication_controller.go:338] Observed updated replica count for rc: postgresql-helper-1, 0->1 I0125 05:12:03.588445 4678 audit.go:125] 2017-01-25T05:12:03.588382819-05:00 AUDIT: id="e579914b-16ab-4fae-836d-0e2a385744c3" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:12:03.596555 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p 
tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:03.596596 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:03.630923 4678 audit.go:125] 2017-01-25T05:12:03.630877334-05:00 AUDIT: id="4be47e82-822c-4699-94f3-132af45fe679" ip="172.17.0.5" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:12:03.633091 4678 audit.go:45] 2017-01-25T05:12:03.633074056-05:00 AUDIT: 
id="e579914b-16ab-4fae-836d-0e2a385744c3" response="200" I0125 05:12:03.633227 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (45.125775ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:03.633411 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:12:03.639872 4678 reflector.go:273] pkg/controller/podautoscaler/horizontal.go:133: forcing resync I0125 05:12:03.643815 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 2) I0125 05:12:03.651610 4678 audit.go:45] 2017-01-25T05:12:03.651586029-05:00 AUDIT: id="4be47e82-822c-4699-94f3-132af45fe679" response="200" I0125 05:12:03.652165 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (78.173496ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.5:44926] I0125 05:12:03.657093 4678 audit.go:125] 2017-01-25T05:12:03.657042945-05:00 AUDIT: id="0af941f9-7b9b-47df-a6b8-67f07ff4c6bd" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:03.657609 4678 audit.go:125] 2017-01-25T05:12:03.657578097-05:00 AUDIT: id="dec30d3d-6518-4614-bf38-45b8d3c001a1" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:03.657911 4678 audit.go:125] 2017-01-25T05:12:03.65788821-05:00 AUDIT: id="b9752b28-5d02-4ba1-a3e6-8533d88dc253" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:03.658166 4678 audit.go:125] 2017-01-25T05:12:03.658139408-05:00 AUDIT: id="6b31902d-2d48-46a8-81ef-80a305fe1292" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/docker-registry" I0125 05:12:03.658515 4678 audit.go:125] 2017-01-25T05:12:03.658482483-05:00 AUDIT: id="cbc9c385-89e3-4c70-bdec-69bc8e1d0db0" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status" I0125 05:12:03.659154 4678 audit.go:45] 2017-01-25T05:12:03.659140942-05:00 AUDIT: id="cbc9c385-89e3-4c70-bdec-69bc8e1d0db0" response="409" I0125 05:12:03.659236 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status: (73.330847ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:03.659353 4678 audit.go:125] 
2017-01-25T05:12:03.659328965-05:00 AUDIT: id="d9ad2b74-d156-421f-aa13-40df2f8ae0fb" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/router" I0125 05:12:03.659882 4678 proxier.go:797] syncProxyRules took 612.480707ms I0125 05:12:03.659901 4678 proxier.go:566] OnEndpointsUpdate took 612.764225ms for 6 endpoints I0125 05:12:03.659930 4678 proxier.go:381] Received update notice: [] I0125 05:12:03.659960 4678 proxier.go:804] Syncing iptables rules I0125 05:12:03.659967 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:03.672225 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d/POD podIP: "" creating hosts mount: false I0125 05:12:03.677175 4678 audit.go:45] 2017-01-25T05:12:03.677153374-05:00 AUDIT: id="6b31902d-2d48-46a8-81ef-80a305fe1292" response="200" I0125 05:12:03.677321 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/docker-registry: (94.370692ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.677628 4678 audit.go:45] 2017-01-25T05:12:03.677616814-05:00 AUDIT: id="0af941f9-7b9b-47df-a6b8-67f07ff4c6bd" response="200" I0125 05:12:03.677695 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (93.529689ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.677945 4678 audit.go:45] 2017-01-25T05:12:03.677931056-05:00 AUDIT: id="d9ad2b74-d156-421f-aa13-40df2f8ae0fb" response="200" I0125 05:12:03.678004 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/router: (94.393463ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.678238 4678 audit.go:45] 2017-01-25T05:12:03.678227256-05:00 AUDIT: id="dec30d3d-6518-4614-bf38-45b8d3c001a1" response="200" I0125 05:12:03.678291 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (92.978251ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.678474 4678 audit.go:45] 2017-01-25T05:12:03.678464941-05:00 AUDIT: id="b9752b28-5d02-4ba1-a3e6-8533d88dc253" response="200" I0125 05:12:03.678550 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (93.825869ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.680559 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:12:03.680892 4678 endpoints_controller.go:334] Finished syncing service "default/docker-registry" endpoints. (109.257537ms) I0125 05:12:03.680969 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:12:03.681255 4678 endpoints_controller.go:334] Finished syncing service "default/router" endpoints. 
(106.478911ms) I0125 05:12:03.681344 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 0 not ready: 0 I0125 05:12:03.691286 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:03.691739 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:03.708795 4678 audit.go:125] 2017-01-25T05:12:03.708741721-05:00 AUDIT: id="8891159c-b40b-424b-9a28-a16f228b5b24" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:12:03.712444 4678 audit.go:45] 2017-01-25T05:12:03.712430662-05:00 AUDIT: id="8891159c-b40b-424b-9a28-a16f228b5b24" response="200" I0125 05:12:03.712742 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (33.746388ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:03.712989 4678 audit.go:125] 2017-01-25T05:12:03.712959246-05:00 AUDIT: id="e224e911-2f65-421c-9c98-46754b85789b" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:03.713685 4678 audit.go:45] 2017-01-25T05:12:03.713662367-05:00 AUDIT: id="e224e911-2f65-421c-9c98-46754b85789b" response="200" I0125 05:12:03.713740 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (31.495997ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.713881 4678 audit.go:125] 2017-01-25T05:12:03.713851097-05:00 AUDIT: id="882faf90-49d6-45b9-b8ec-aaa8aa89a26f" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:03.714128 4678 audit.go:125] 2017-01-25T05:12:03.714101986-05:00 AUDIT: id="c02b011b-2834-47ae-8190-b110b6837816" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:03.714591 4678 audit.go:45] 2017-01-25T05:12:03.714580027-05:00 AUDIT: id="c02b011b-2834-47ae-8190-b110b6837816" response="200" I0125 05:12:03.714644 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (31.636429ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.715026 4678 audit.go:45] 2017-01-25T05:12:03.715015437-05:00 AUDIT: id="882faf90-49d6-45b9-b8ec-aaa8aa89a26f" response="200" I0125 05:12:03.715066 4678 panics.go:76] PUT 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (9.075088ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:03.715515 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. (139.976978ms) I0125 05:12:03.715727 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1, replicas 1->1 (need 1), fullyLabeledReplicas 1->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 2->2 I0125 05:12:03.716021 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (140.745365ms) I0125 05:12:03.716087 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (138.111324ms) I0125 05:12:03.722994 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:03.739338 4678 audit.go:125] 2017-01-25T05:12:03.739250298-05:00 AUDIT: id="4986e20f-7eb8-46be-bf06-89dd3df61f68" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status" I0125 05:12:03.743950 4678 audit.go:45] 2017-01-25T05:12:03.743931583-05:00 AUDIT: id="4986e20f-7eb8-46be-bf06-89dd3df61f68" response="200" I0125 05:12:03.745906 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status: (29.49133ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:03.746819 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (168.337158ms) I0125 05:12:03.746921 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (56.84µs) I0125 05:12:03.747117 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. Desired pod count change: 1->1 I0125 05:12:03.747170 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (27.422µs) I0125 05:12:03.747401 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. 
I0125 05:12:03.755695 4678 reflector.go:273] pkg/controller/disruption/disruption.go:284: forcing resync I0125 05:12:03.755739 4678 reflector.go:273] pkg/controller/disruption/disruption.go:281: forcing resync I0125 05:12:03.755751 4678 reflector.go:273] pkg/controller/disruption/disruption.go:283: forcing resync I0125 05:12:03.756393 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:03.785091 4678 reflector.go:273] pkg/controller/disruption/disruption.go:285: forcing resync I0125 05:12:03.789560 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:03.810299 4678 audit.go:125] 2017-01-25T05:12:03.810224205-05:00 AUDIT: id="e094cd43-b0a4-492b-af9d-bd3776658864" ip="172.17.0.5" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-helper-1%2Cdeploymentconfig%3Dpostgresql-helper%2Cname%3Dpostgresql-helper&resourceVersion=0" I0125 05:12:03.811046 4678 audit.go:45] 2017-01-25T05:12:03.811031718-05:00 AUDIT: id="e094cd43-b0a4-492b-af9d-bd3776658864" response="200" I0125 05:12:03.811413 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-helper-1%2Cdeploymentconfig%3Dpostgresql-helper%2Cname%3Dpostgresql-helper&resourceVersion=0: (5.010173ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.5:44926] I0125 05:12:03.816833 4678 audit.go:125] 2017-01-25T05:12:03.816769857-05:00 AUDIT: id="bcd5566c-795b-48c6-8ec7-9f58aa7f7086" ip="172.17.0.5" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-helper-1%2Cdeploymentconfig%3Dpostgresql-helper%2Cname%3Dpostgresql-helper&resourceVersion=11037&timeoutSeconds=401" I0125 05:12:03.817507 4678 audit.go:45] 2017-01-25T05:12:03.817495101-05:00 AUDIT: id="bcd5566c-795b-48c6-8ec7-9f58aa7f7086" response="200" I0125 05:12:03.825280 4678 reflector.go:273] pkg/controller/petset/pet_set.go:148: forcing resync I0125 05:12:03.834402 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:03.844892 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d/POD: setting entrypoint "[]" and command "[]" I0125 05:12:03.858758 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:03.888720 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:03.911980 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:03.936959 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2.scope" E0125 05:12:03.939794 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" container "POD": symlink 
/var/log/containers/postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8_POD-68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2.log: no such file or directory I0125 05:12:03.939848 4678 docker_manager.go:1833] DNS ResolvConfPath exists: /var/lib/docker/containers/68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2/resolv.conf. Will attempt to add ndots option: options ndots:5 I0125 05:12:03.939901 4678 docker_manager.go:2167] Calling network plugin kubernetes.io/no-op to setup pod for postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:12:03.945817 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck 
--seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:03.945860 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:03.986332 4678 reflector.go:273] pkg/controller/informers/factory.go:89: forcing resync I0125 05:12:03.988013 4678 proxier.go:797] syncProxyRules took 328.047422ms I0125 05:12:03.988037 4678 proxier.go:431] OnServiceUpdate took 328.096353ms for 4 services I0125 05:12:04.131062 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:12:04.131087 4678 reflector.go:273] 
pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:12:04.131131 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:12:04.131140 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:12:04.131162 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:04.131185 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:04.131189 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:04.131200 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:12:04.131208 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:12:04.131215 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:04.131219 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:04.131233 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:12:04.131062 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:12:04.131240 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:12:04.131248 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:12:04.131226 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:04.131360 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:04.131366 4678 pv_controller.go:643] updating 
PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:12:04.131371 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:12:04.131374 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:04.131416 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:04.131425 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:12:04.131454 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:12:04.131458 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:04.131474 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:04.131483 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:04.562106 4678 generic.go:145] GenericPLEG: b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d: non-existent -> running I0125 05:12:04.562138 4678 generic.go:145] GenericPLEG: b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f: non-existent -> running I0125 05:12:04.562153 4678 generic.go:145] GenericPLEG: b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2: non-existent -> running I0125 05:12:04.588757 4678 manager.go:898] Added container: "/system.slice/docker-68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2.scope" (aliases: [k8s_POD.73b4fecf_postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8_b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094_858b7658 68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2], namespace: "docker") I0125 05:12:04.589043 4678 handler.go:325] Added event &{/system.slice/docker-68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2.scope 2017-01-25 05:12:03.179875978 -0500 EST containerCreation {}} I0125 05:12:04.589166 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b6efaf68\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:12:04.589178 4678 factory.go:115] Factory "docker" was unable to handle container 
"/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b6efaf68\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:04.589223 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b6efaf68\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. I0125 05:12:04.589239 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b6efaf68\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:04.589275 4678 container.go:407] Start housekeeping for container "/system.slice/docker-68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2.scope" I0125 05:12:04.604592 4678 worker.go:162] Probe target container not found: postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-master I0125 05:12:04.685745 4678 hairpin.go:110] Enabling hairpin on interface veth4bb3ee1 I0125 05:12:04.686057 4678 docker_manager.go:2208] Determined pod ip after infra change: "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)": "172.17.0.6" I0125 05:12:04.686087 4678 docker_manager.go:2293] Creating container &{Name:postgresql-slave Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 Command:[] Args:[run-postgresql-slave] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_MASTER_SERVICE_NAME Value:postgresql-master ValueFrom:} {Name:POSTGRESQL_MASTER_USER Value:master ValueFrom:} {Name:POSTGRESQL_MASTER_PASSWORD Value:qcoktIqkwDX8 ValueFrom:} {Name:POSTGRESQL_USER Value:user ValueFrom:} {Name:POSTGRESQL_PASSWORD Value:IbyV1wgYrrMd ValueFrom:} {Name:POSTGRESQL_DATABASE Value:userdb ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:postgresql-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc42cb02330 ReadinessProbe:0xc42cb02360 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc42cb02390 Stdin:false StdinOnce:false TTY:false} in pod postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:12:04.692824 4678 provider.go:119] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider I0125 05:12:04.692852 4678 config.go:132] looking for .docker/config.json at /mnt/openshift-xfs-vol-dir/config.json I0125 05:12:04.693065 4678 config.go:132] looking for .docker/config.json at /data/src/github.com/openshift/origin/config.json I0125 05:12:04.693149 4678 config.go:132] looking for .docker/config.json at /root/.docker/config.json I0125 05:12:04.693304 4678 config.go:132] looking for .docker/config.json at /.docker/config.json I0125 05:12:04.693328 4678 config.go:101] looking for .dockercfg at /mnt/openshift-xfs-vol-dir/.dockercfg I0125 05:12:04.693347 4678 config.go:101] looking for .dockercfg at /data/src/github.com/openshift/origin/.dockercfg I0125 05:12:04.693358 4678 config.go:101] looking for .dockercfg at /root/.dockercfg I0125 05:12:04.693368 4678 config.go:101] looking for 
.dockercfg at /.dockercfg I0125 05:12:04.693380 4678 provider.go:89] Unable to parse Docker config file: couldn't find valid .dockercfg after checking in [/mnt/openshift-xfs-vol-dir /root /] I0125 05:12:04.693397 4678 docker.go:239] Pulling image centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 without credentials I0125 05:12:04.693982 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11002", FieldPath:"spec.containers{postgresql-slave}"}): type: 'Normal' reason: 'Pulling' pulling image "centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389" I0125 05:12:04.694645 4678 audit.go:125] 2017-01-25T05:12:04.694600818-05:00 AUDIT: id="35d27b05-8732-4571-9733-4a33db4741ea" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:04.697758 4678 audit.go:45] 2017-01-25T05:12:04.697742425-05:00 AUDIT: id="35d27b05-8732-4571-9733-4a33db4741ea" response="201" I0125 05:12:04.697831 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.508784ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:04.860097 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741.scope" E0125 05:12:04.860980 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" container "POD": symlink /var/log/containers/postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8_POD-bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741.log: no such file or directory I0125 05:12:04.861020 4678 docker_manager.go:1833] DNS ResolvConfPath exists: /var/lib/docker/containers/bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741/resolv.conf. 
Will attempt to add ndots option: options ndots:5 I0125 05:12:04.861082 4678 docker_manager.go:2167] Calling network plugin kubernetes.io/no-op to setup pod for postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:12:04.918795 4678 hairpin.go:110] Enabling hairpin on interface vethc8e33d1 I0125 05:12:04.918920 4678 docker_manager.go:2208] Determined pod ip after infra change: "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)": "172.17.0.7" I0125 05:12:04.918951 4678 docker_manager.go:2293] Creating container &{Name:postgresql-master Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 Command:[] Args:[run-postgresql-master] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_MASTER_USER Value:master ValueFrom:} {Name:POSTGRESQL_MASTER_PASSWORD Value:qcoktIqkwDX8 ValueFrom:} {Name:POSTGRESQL_USER Value:user ValueFrom:} {Name:POSTGRESQL_PASSWORD Value:IbyV1wgYrrMd ValueFrom:} {Name:POSTGRESQL_DATABASE Value:userdb ValueFrom:} {Name:POSTGRESQL_ADMIN_PASSWORD Value:X5NgRSrwacHP ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:postgresql-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc4269cff50 ReadinessProbe:0xc4269cff80 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc4269cffb0 Stdin:false StdinOnce:false TTY:false} in pod postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:12:05.028812 4678 docker.go:239] Pulling image centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 without credentials I0125 05:12:05.029343 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11018", FieldPath:"spec.containers{postgresql-master}"}): type: 'Normal' reason: 'Pulling' pulling image "centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389" I0125 05:12:05.029904 4678 audit.go:125] 2017-01-25T05:12:05.029868904-05:00 AUDIT: id="6feae942-b82f-4614-b4ad-5a92d0d895d2" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:05.034865 4678 audit.go:45] 2017-01-25T05:12:05.034843959-05:00 AUDIT: id="6feae942-b82f-4614-b4ad-5a92d0d895d2" response="201" I0125 05:12:05.034948 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (5.283497ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.104095 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc432663340 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: 
Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/ce4eccb1 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42ec659e0 NetworkSettings:0xc4368a7a00} E0125 05:12:05.104251 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" container "POD": symlink /var/log/containers/postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8_POD-969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017.log: no such file or directory I0125 05:12:05.104294 4678 docker_manager.go:1833] DNS ResolvConfPath exists: /var/lib/docker/containers/969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017/resolv.conf. Will attempt to add ndots option: options ndots:5 I0125 05:12:05.104360 4678 docker_manager.go:2167] Calling network plugin kubernetes.io/no-op to setup pod for postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:12:05.144976 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc432663b80 Mounts:[] Config:0xc4348e4480 NetworkSettings:0xc4368a7b00} I0125 05:12:05.158918 4678 hairpin.go:110] Enabling hairpin on interface veth6716617 I0125 05:12:05.159030 4678 docker_manager.go:2208] Determined pod ip after infra change: "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)": "172.17.0.8" I0125 05:12:05.159082 4678 docker_manager.go:2293] Creating container &{Name:postgresql Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 Command:[] Args:[] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_USER Value: ValueFrom:0xc426a5c200} {Name:POSTGRESQL_PASSWORD Value: ValueFrom:0xc426a5c240} {Name:POSTGRESQL_DATABASE Value:sampledb ValueFrom:}] Resources:{Limits:map[memory:{i:{value:536870912 scale:0} d:{Dec:} s: Format:BinarySI}] Requests:map[memory:{i:{value:536870912 scale:0} d:{Dec:} s: Format:BinarySI}]} VolumeMounts:[{Name:postgresql-helper-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc435846f90 ReadinessProbe:0xc435846fc0 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc435846ff0 Stdin:false StdinOnce:false TTY:false} in pod postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:12:05.190188 4678 audit.go:125] 2017-01-25T05:12:05.190146048-05:00 AUDIT: id="c3f12505-f8d2-462f-94c5-61b5b1316ae5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:12:05.191589 4678 audit.go:45] 2017-01-25T05:12:05.19157833-05:00 AUDIT: id="c3f12505-f8d2-462f-94c5-61b5b1316ae5" response="200" I0125 05:12:05.191657 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.807481ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 
05:12:05.372831 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d/postgresql podIP: "172.17.0.8" creating hosts mount: true I0125 05:12:05.373437 4678 manager.go:898] Added container: "/system.slice/docker-bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741.scope" (aliases: [k8s_POD.73b4fecf_postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8_b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094_ec0e86ef bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741], namespace: "docker") I0125 05:12:05.373571 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-helper-1-cpv6d", UID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11031", FieldPath:"spec.containers{postgresql}"}): type: 'Normal' reason: 'Pulled' Container image "centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5" already present on machine I0125 05:12:05.373658 4678 handler.go:325] Added event &{/system.slice/docker-bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741.scope 2017-01-25 05:12:04.701878724 -0500 EST containerCreation {}} I0125 05:12:05.374030 4678 generic.go:342] PLEG: Write status for postgresql-helper-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-helper-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.5", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42d7c50a0), (*container.ContainerStatus)(0xc42d7c5260)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:05.374154 4678 container.go:407] Start housekeeping for container "/system.slice/docker-bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741.scope" I0125 05:12:05.374258 4678 audit.go:125] 2017-01-25T05:12:05.374219947-05:00 AUDIT: id="0cf90fd3-fb3a-4299-a9ff-9549d7eab9db" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/postgresql-helper" I0125 05:12:05.375120 4678 audit.go:125] 2017-01-25T05:12:05.375085455-05:00 AUDIT: id="52189acb-aced-419e-9a4e-7f56f02dd2a2" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:05.394672 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.395746 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.396520 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d"} I0125 05:12:05.396978 4678 factory.go:111] Using factory "docker" for container 
"/system.slice/docker-969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017.scope" I0125 05:12:05.402888 4678 audit.go:45] 2017-01-25T05:12:05.402867488-05:00 AUDIT: id="0cf90fd3-fb3a-4299-a9ff-9549d7eab9db" response="200" I0125 05:12:05.403120 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/postgresql-helper: (29.188168ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.403337 4678 audit.go:125] 2017-01-25T05:12:05.403258042-05:00 AUDIT: id="2427cb5b-d639-4a5d-a4c2-a0564dadea94" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy" I0125 05:12:05.406955 4678 audit.go:45] 2017-01-25T05:12:05.406938489-05:00 AUDIT: id="2427cb5b-d639-4a5d-a4c2-a0564dadea94" response="200" I0125 05:12:05.407104 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy: (4.322781ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.408801 4678 audit.go:125] 2017-01-25T05:12:05.40876443-05:00 AUDIT: id="f958066a-8e7a-4a2e-ac89-817d337becd3" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy/status" I0125 05:12:05.413786 4678 audit.go:45] 2017-01-25T05:12:05.41377062-05:00 AUDIT: id="f958066a-8e7a-4a2e-ac89-817d337becd3" response="200" I0125 05:12:05.413882 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy/status: (5.399142ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.415683 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42a463e40 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/04ad5900 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42d595d40 NetworkSettings:0xc42f213100} I0125 05:12:05.417744 4678 replication_controller.go:378] Pod postgresql-helper-1-deploy updated, objectMeta {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10989 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-helper-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-deploy GenerateName: 
Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11045 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:05.417917 4678 replication_controller.go:255] No controllers found for pod postgresql-helper-1-deploy, replication manager will avoid syncing I0125 05:12:05.417976 4678 replica_set.go:320] Pod postgresql-helper-1-deploy updated, objectMeta {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10989 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-helper-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11045 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
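Aside on the E0125 `docker_manager.go:1742` entries in this excerpt ("Failed to create symbolic link to the log file of pod ...: no such file or directory"): `os.Symlink` returns ENOENT when the directory that should contain the link (here `/var/log/containers`) does not yet exist. A minimal, self-contained sketch of that failure mode and the usual remedy (create the parent directory first); the paths and helper name are illustrative only, not the kubelet's actual code:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// linkContainerLog mimics the failure mode seen in the E0125 entries above:
// creating a symlink fails with "no such file or directory" when the directory
// that should hold the link does not exist yet. Illustrative sketch only.
func linkContainerLog(target, linkPath string) error {
	if err := os.Symlink(target, linkPath); err != nil {
		if os.IsNotExist(err) {
			// Typical fix: ensure the parent directory exists, then retry.
			if mkErr := os.MkdirAll(filepath.Dir(linkPath), 0755); mkErr != nil {
				return mkErr
			}
			return os.Symlink(target, linkPath)
		}
		return err
	}
	return nil
}

func main() {
	dir, _ := os.MkdirTemp("", "symlink-demo")
	defer os.RemoveAll(dir)

	target := filepath.Join(dir, "container.log")
	os.WriteFile(target, []byte("log line\n"), 0644)

	// The "containers" directory does not exist yet, so a bare os.Symlink
	// would fail with ENOENT, just like the log entry above.
	link := filepath.Join(dir, "containers", "pod_container.log")
	if err := linkContainerLog(target, link); err != nil {
		fmt.Println("symlink failed:", err)
		return
	}
	fmt.Println("symlink created at", link)
}
```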
I0125 05:12:05.418242 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-deploy, ReplicaSet controller will avoid syncing I0125 05:12:05.418396 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-deploy, job controller will avoid syncing I0125 05:12:05.418695 4678 status_manager.go:425] Status for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935917 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935925 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935917 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.5 StartTime:0xc4300476a0 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running:0xc42d923260 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d}]} version:2 podName:postgresql-helper-1-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:05.418774 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-deploy" I0125 05:12:05.418795 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:12:05.418801 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-deploy" I0125 05:12:05.418915 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-deploy, StatefulSet controller will avoid syncing I0125 05:12:05.419566 4678 daemoncontroller.go:332] Pod postgresql-helper-1-deploy updated. 
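The `status_manager.go:425` and `kubelet_pods.go:1029` entries here show the kubelet regenerating a pod's status, including its `Ready` condition, from the per-container statuses (later in this excerpt the postgresql-helper-1-cpv6d pod reports `Reason:ContainersNotReady Message:containers with unready status: [postgresql]`). A minimal sketch of that aggregation rule, using simplified stand-in types rather than the real Kubernetes API structs:

```go
package main

import "fmt"

// Simplified stand-ins for the API types seen in the status dumps above.
type ContainerStatus struct {
	Name  string
	Ready bool
}

type Condition struct {
	Type    string
	Status  string // "True" or "False"
	Reason  string
	Message string
}

// readyCondition aggregates container readiness into a single pod-level Ready
// condition, mirroring (in spirit) the "ContainersNotReady" reason visible
// later in this log. Illustrative sketch, not kubelet code.
func readyCondition(containers []ContainerStatus) Condition {
	var unready []string
	for _, c := range containers {
		if !c.Ready {
			unready = append(unready, c.Name)
		}
	}
	if len(unready) == 0 {
		return Condition{Type: "Ready", Status: "True"}
	}
	return Condition{
		Type:    "Ready",
		Status:  "False",
		Reason:  "ContainersNotReady",
		Message: fmt.Sprintf("containers with unready status: %v", unready),
	}
}

func main() {
	fmt.Printf("%+v\n", readyCondition([]ContainerStatus{{Name: "deployment", Ready: true}}))
	fmt.Printf("%+v\n", readyCondition([]ContainerStatus{{Name: "postgresql", Ready: false}}))
}
```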
I0125 05:12:05.419664 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-deploy, daemon set controller will avoid syncing I0125 05:12:05.420113 4678 config.go:281] Setting pods for source api I0125 05:12:05.422801 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.426385 4678 audit.go:125] 2017-01-25T05:12:05.426327431-05:00 AUDIT: id="86a15476-5a67-4088-96b2-1b110f90bcc9" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:12:05.433058 4678 audit.go:45] 2017-01-25T05:12:05.433035346-05:00 AUDIT: id="86a15476-5a67-4088-96b2-1b110f90bcc9" response="200" I0125 05:12:05.435288 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (12.010815ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:12:05.436483 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. I0125 05:12:05.436883 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. Desired pod count change: 1->1 I0125 05:12:05.437112 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (117.642µs) I0125 05:12:05.437532 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 status from Pending to Running (scale: 1) I0125 05:12:05.439740 4678 audit.go:45] 2017-01-25T05:12:05.439719656-05:00 AUDIT: id="52189acb-aced-419e-9a4e-7f56f02dd2a2" response="201" I0125 05:12:05.439825 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (64.980615ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.443817 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:05.444615 4678 audit.go:125] 2017-01-25T05:12:05.444570241-05:00 AUDIT: id="4274682e-fedc-46fe-b020-eedc66abc5ab" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:12:05.446984 4678 audit.go:45] 2017-01-25T05:12:05.446964249-05:00 AUDIT: id="4274682e-fedc-46fe-b020-eedc66abc5ab" response="200" I0125 05:12:05.447216 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (2.901109ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.447557 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:12:05.447965 4678 atomic_writer.go:142] pod 
extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:05.448259 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:05.486152 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc4329bb8c0 Mounts:[] Config:0xc43142c5a0 NetworkSettings:0xc429d33500} I0125 05:12:05.501409 4678 generic.go:342] PLEG: Write status for postgresql-master-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.4", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42a6c3420), (*container.ContainerStatus)(0xc42a7b95e0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:05.501590 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f"} I0125 05:12:05.501659 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.501851 4678 status_manager.go:312] Ignoring same status for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:11:56 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:03 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:11:56 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.4 StartTime:2017-01-25 05:11:56 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running:0xc435cd9b80 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f}]} I0125 05:12:05.501977 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.547552 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:05.549839 4678 audit.go:125] 2017-01-25T05:12:05.549791141-05:00 AUDIT: id="bd59de0a-3167-448b-860d-29e8a45dcfe1" 
ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:12:05.554136 4678 audit.go:45] 2017-01-25T05:12:05.554112911-05:00 AUDIT: id="bd59de0a-3167-448b-860d-29e8a45dcfe1" response="200" I0125 05:12:05.554375 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (4.873533ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.554985 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:12:05.555219 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:05.555377 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:05.620844 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc4294acc60 Mounts:[] Config:0xc4291a4c60 NetworkSettings:0xc437842e00} I0125 05:12:05.666459 4678 panics.go:76] GET /api/v1/watch/services?resourceVersion=10097&timeoutSeconds=452: (7m32.003637275s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:service-serving-cert-controller] 172.18.7.222:50846] I0125 05:12:05.666747 4678 reflector.go:392] github.com/openshift/origin/pkg/service/controller/servingcert/secret_updating_controller.go:109: Watch close - *api.Service total 11 items received I0125 05:12:05.671367 4678 audit.go:125] 2017-01-25T05:12:05.671320153-05:00 AUDIT: id="1dcdbf2b-b4af-471d-a4ca-9b85e62709cd" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:service-serving-cert-controller" as="" asgroups="" namespace="" uri="/api/v1/watch/services?resourceVersion=10976&timeoutSeconds=359" I0125 05:12:05.671900 4678 audit.go:45] 2017-01-25T05:12:05.671884863-05:00 AUDIT: id="1dcdbf2b-b4af-471d-a4ca-9b85e62709cd" response="200" I0125 05:12:05.687613 4678 generic.go:342] PLEG: Write status for postgresql-slave-1-qt1rc/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-slave-1-qt1rc", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.6", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42a7b9880)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:05.687709 4678 kubelet_pods.go:1029] Generating status for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.688119 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.689902 4678 audit.go:125] 2017-01-25T05:12:05.689854924-05:00 AUDIT: id="31121520-84f2-46a6-ba8a-82f825deefa1" 
ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy" I0125 05:12:05.692673 4678 audit.go:45] 2017-01-25T05:12:05.692656333-05:00 AUDIT: id="31121520-84f2-46a6-ba8a-82f825deefa1" response="200" I0125 05:12:05.692782 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy: (3.867402ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.694092 4678 audit.go:125] 2017-01-25T05:12:05.694004534-05:00 AUDIT: id="42842b49-1799-417b-ae46-4dbdc18f8000" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy/status" I0125 05:12:05.698879 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2"} I0125 05:12:05.698957 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:05.699269 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.699849 4678 audit.go:125] 2017-01-25T05:12:05.699810557-05:00 AUDIT: id="80dddf4b-9003-439e-a3b7-3f8e9843f242" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:12:05.700981 4678 audit.go:45] 2017-01-25T05:12:05.700965363-05:00 AUDIT: id="80dddf4b-9003-439e-a3b7-3f8e9843f242" response="200" I0125 05:12:05.701188 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (1.629783ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.701489 4678 docker_manager.go:1938] Found pod infra container for "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.701558 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.701577 4678 docker_manager.go:1999] pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment" exists as 57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d I0125 05:12:05.701667 4678 docker_manager.go:2086] Got container changes for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] 
ContainersToKeep:map[c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b:-1 57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d:0]} I0125 05:12:05.702030 4678 audit.go:45] 2017-01-25T05:12:05.702017331-05:00 AUDIT: id="42842b49-1799-417b-ae46-4dbdc18f8000" response="200" I0125 05:12:05.702098 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy/status: (8.497004ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.702461 4678 status_manager.go:425] Status for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935925 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.2 StartTime:0xc430d2f780 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running:0xc437098b80 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d}]} version:2 podName:postgresql-slave-1-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:05.702948 4678 replication_controller.go:378] Pod postgresql-slave-1-deploy updated, objectMeta {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10972 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11048 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
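The `docker_manager.go:2086` "Got container changes" entries above record the per-pod sync decision: the infra (pause) container and any already-running app container are kept, and only containers with no running instance are started, which is why `ContainersToStart` is empty here. A toy sketch of that decision with invented, simplified types (not the actual dockertools implementation); the container IDs in `main` are truncated and purely illustrative:

```go
package main

import "fmt"

// PodSyncChanges is a simplified model of the struct printed in the
// docker_manager.go:2086 entries above.
type PodSyncChanges struct {
	StartInfraContainer bool
	ContainersToKeep    map[string]int // container ID -> index in pod spec (-1 for infra)
	ContainersToStart   map[int]string // spec index -> reason
}

// computeChanges keeps the infra container and every running app container,
// and marks the rest to be started. Illustrative sketch only.
func computeChanges(infraID string, specContainers []string, running map[string]string) PodSyncChanges {
	changes := PodSyncChanges{
		StartInfraContainer: infraID == "",
		ContainersToKeep:    map[string]int{},
		ContainersToStart:   map[int]string{},
	}
	if infraID != "" {
		changes.ContainersToKeep[infraID] = -1 // infra container kept, no spec index
	}
	for i, name := range specContainers {
		if id, ok := running[name]; ok {
			changes.ContainersToKeep[id] = i
		} else {
			changes.ContainersToStart[i] = "container not running"
		}
	}
	return changes
}

func main() {
	// Mirrors the postgresql-helper-1-deploy pod above: a healthy infra
	// container plus a running "deployment" container, so nothing is started.
	changes := computeChanges(
		"c3d3fd396d9e", // truncated infra container ID, illustrative only
		[]string{"deployment"},
		map[string]string{"deployment": "57a48f33a7e9"},
	)
	fmt.Printf("%+v\n", changes)
}
```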
I0125 05:12:05.703055 4678 replication_controller.go:255] No controllers found for pod postgresql-slave-1-deploy, replication manager will avoid syncing I0125 05:12:05.703072 4678 replica_set.go:320] Pod postgresql-slave-1-deploy updated, objectMeta {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:10972 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11048 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-slave-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:05.703130 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-deploy, ReplicaSet controller will avoid syncing I0125 05:12:05.703152 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-deploy, job controller will avoid syncing I0125 05:12:05.703168 4678 daemoncontroller.go:332] Pod postgresql-slave-1-deploy updated. I0125 05:12:05.703188 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-deploy, daemon set controller will avoid syncing I0125 05:12:05.703218 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-deploy" I0125 05:12:05.703233 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-deploy, PodDisruptionBudget controller will avoid syncing. 
I0125 05:12:05.703238 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-deploy" I0125 05:12:05.703297 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-deploy, StatefulSet controller will avoid syncing I0125 05:12:05.703529 4678 config.go:281] Setting pods for source api I0125 05:12:05.749453 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:05.750340 4678 audit.go:125] 2017-01-25T05:12:05.750290826-05:00 AUDIT: id="0db170ec-3b93-452a-975f-b160b70d00c8" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:12:05.752298 4678 audit.go:45] 2017-01-25T05:12:05.752280641-05:00 AUDIT: id="0db170ec-3b93-452a-975f-b160b70d00c8" response="200" I0125 05:12:05.752690 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (2.690201ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.752933 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:12:05.754026 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:05.754210 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094"). 
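The `secret.go:179/206` and `atomic_writer.go:142` entries above show the secret volume plugin re-fetching `deployer-token-r7jj8` and then skipping the write because the files on disk already match the desired payload ("no update required for target directory"), after which `MountVolume.SetUp` reports success. A minimal sketch of that idempotent-write check, assuming a plain payload map; the real atomic writer additionally writes through a timestamped directory and a symlink swap, which is omitted here:

```go
package main

import (
	"bytes"
	"fmt"
	"os"
	"path/filepath"
)

// writeIfChanged compares the desired payload against what is already on disk
// and rewrites only files whose contents differ, mirroring the
// "no update required for target directory" message above. Simplified sketch,
// not the real atomic_writer.
func writeIfChanged(targetDir string, payload map[string][]byte) (updated bool, err error) {
	for name, want := range payload {
		path := filepath.Join(targetDir, name)
		got, readErr := os.ReadFile(path)
		if readErr == nil && bytes.Equal(got, want) {
			continue // file already up to date
		}
		if err := os.WriteFile(path, want, 0600); err != nil {
			return updated, err
		}
		updated = true
	}
	return updated, nil
}

func main() {
	dir, _ := os.MkdirTemp("", "secret-vol")
	defer os.RemoveAll(dir)

	payload := map[string][]byte{"token": []byte("abc"), "ca.crt": []byte("---")}

	u1, _ := writeIfChanged(dir, payload)
	u2, _ := writeIfChanged(dir, payload)
	fmt.Println("first sync updated:", u1)  // true: files created
	fmt.Println("second sync updated:", u2) // false: no update required
}
```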
I0125 05:12:05.783957 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.803549 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.805140 4678 audit.go:125] 2017-01-25T05:12:05.805087331-05:00 AUDIT: id="15decb3d-0f42-4275-99e5-ecc2e690d400" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:12:05.806956 4678 audit.go:45] 2017-01-25T05:12:05.806938449-05:00 AUDIT: id="15decb3d-0f42-4275-99e5-ecc2e690d400" response="200" I0125 05:12:05.807177 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (3.06691ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:05.807474 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.807547 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:05.807565 4678 docker_manager.go:1999] pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment" exists as a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f I0125 05:12:05.807652 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a:-1 a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f:0]} I0125 05:12:05.809232 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d/postgresql: setting entrypoint "[]" and command "[]" I0125 05:12:05.957782 4678 manager.go:898] Added container: "/system.slice/docker-969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017.scope" (aliases: [k8s_POD.73b4fecf_postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8_b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094_777532d7 969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017], namespace: "docker") I0125 05:12:05.958053 4678 handler.go:325] Added event &{/system.slice/docker-969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017.scope 2017-01-25 05:12:04.720878758 -0500 EST containerCreation {}} I0125 05:12:05.961338 4678 container.go:407] Start housekeeping for container "/system.slice/docker-969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017.scope" I0125 05:12:05.976235 4678 factory.go:104] Error trying to work out if we can handle 
/system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b76687cc\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:12:05.976367 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:05.976444 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. I0125 05:12:05.976462 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:05.988020 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:12:05.988043 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:12:05.991987 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:06.001981 4678 audit.go:125] 2017-01-25T05:12:06.001926004-05:00 AUDIT: id="e125973e-4ccb-44a2-9345-9baa474ad019" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:12:06.003559 4678 audit.go:45] 2017-01-25T05:12:06.003541234-05:00 AUDIT: id="e125973e-4ccb-44a2-9345-9baa474ad019" response="200" I0125 05:12:06.003804 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (2.183067ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:06.004094 4678 docker_manager.go:1938] Found pod infra container for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:06.004167 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:06.004186 4678 docker_manager.go:1999] pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment" exists as 269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d I0125 05:12:06.004294 4678 docker_manager.go:2086] Got container changes for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f:-1 269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d:0]} I0125 05:12:06.093620 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:12:06.307120 4678 
server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-helper-1-cpv6d", UID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11031", FieldPath:"spec.containers{postgresql}"}): type: 'Normal' reason: 'Created' Created container with docker id 1ebc67751226; Security:[seccomp=unconfined] I0125 05:12:06.308059 4678 audit.go:125] 2017-01-25T05:12:06.308014454-05:00 AUDIT: id="061f53c1-55ea-4a91-a3a0-8c9cfb068767" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:06.310366 4678 audit.go:45] 2017-01-25T05:12:06.3103544-05:00 AUDIT: id="061f53c1-55ea-4a91-a3a0-8c9cfb068767" response="201" I0125 05:12:06.310417 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (2.683214ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:06.631810 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-helper-1-cpv6d", UID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11031", FieldPath:"spec.containers{postgresql}"}): type: 'Normal' reason: 'Started' Started container with docker id 1ebc67751226 I0125 05:12:06.632441 4678 audit.go:125] 2017-01-25T05:12:06.63238935-05:00 AUDIT: id="cd48538e-ce36-4862-95ab-0d2f803c4d22" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:06.635221 4678 audit.go:45] 2017-01-25T05:12:06.635185754-05:00 AUDIT: id="cd48538e-ce36-4862-95ab-0d2f803c4d22" response="201" I0125 05:12:06.635291 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.191913ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:06.639685 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208.scope" E0125 05:12:06.640531 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql": symlink /var/log/containers/postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208.log: no such file or directory I0125 05:12:06.703336 4678 generic.go:145] GenericPLEG: b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208: non-existent -> running I0125 05:12:06.703366 4678 generic.go:145] GenericPLEG: b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017: non-existent -> running I0125 05:12:06.703376 4678 generic.go:145] GenericPLEG: b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741: non-existent -> running I0125 05:12:06.798780 4678 audit.go:125] 2017-01-25T05:12:06.798730755-05:00 AUDIT: id="e49acf4c-58cb-4ae0-b255-5dd79e536037" ip="172.18.7.222" method="GET" 
user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:12:06.799311 4678 audit.go:45] 2017-01-25T05:12:06.799296498-05:00 AUDIT: id="e49acf4c-58cb-4ae0-b255-5dd79e536037" response="200" I0125 05:12:06.799660 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.206387ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:06.844490 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc4309c1e40 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/containers/postgresql/422ec933 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~empty-dir/postgresql-helper-data Destination:/var/lib/pgsql/data Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate}] Config:0xc42f522b40 NetworkSettings:0xc437ee3d00} I0125 05:12:06.882815 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc429372c60 Mounts:[] Config:0xc42ef10900 NetworkSettings:0xc429d63f00} I0125 05:12:06.892578 4678 generic.go:342] PLEG: Write status for postgresql-helper-1-cpv6d/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-helper-1-cpv6d", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.8", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc4344e8460), (*container.ContainerStatus)(0xc4344e88c0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:06.892762 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208"} I0125 05:12:06.892812 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017"} I0125 05:12:06.892860 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:06.893045 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:06.893807 4678 audit.go:125] 2017-01-25T05:12:06.893760582-05:00 AUDIT: id="5a496f78-174d-4ae1-9d0f-83b7f94cbe05" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:12:06.895334 4678 audit.go:45] 2017-01-25T05:12:06.895320229-05:00 AUDIT: id="5a496f78-174d-4ae1-9d0f-83b7f94cbe05" response="200" I0125 05:12:06.895469 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (1.986457ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:06.896647 4678 audit.go:125] 2017-01-25T05:12:06.896610274-05:00 AUDIT: id="2ad7d2e0-7098-416e-9f06-7631557d22b1" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/status" I0125 05:12:06.899309 4678 audit.go:45] 2017-01-25T05:12:06.899295238-05:00 AUDIT: id="2ad7d2e0-7098-416e-9f06-7631557d22b1" response="200" I0125 05:12:06.899400 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/status: (3.047362ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:06.900924 4678 replication_controller.go:378] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11035 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral] Annotations:map[openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11051 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 
openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:06.901113 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (36.745µs) I0125 05:12:06.901146 4678 replica_set.go:320] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11035 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11051 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} ] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:06.901271 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-cpv6d, ReplicaSet controller will avoid syncing I0125 05:12:06.901306 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-cpv6d, job controller will avoid syncing I0125 05:12:06.901333 4678 daemoncontroller.go:332] Pod postgresql-helper-1-cpv6d updated. I0125 05:12:06.901365 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-cpv6d, daemon set controller will avoid syncing I0125 05:12:06.901389 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-cpv6d" I0125 05:12:06.901407 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-cpv6d, PodDisruptionBudget controller will avoid syncing. 
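Note on the back-to-back objectMeta dumps in the `replication_controller.go:378` and `replica_set.go:320` entries above: they appear to differ only in ResourceVersion (11035 -> 11051) and in the print order of the Labels/Annotations maps, so the annotations only look reshuffled. The follow-up "No ReplicaSets found for pod ..., ReplicaSet controller will avoid syncing" lines come from matching the pod's labels against each controller's selector and finding none. A minimal sketch of that selector-matching check, using plain maps and hypothetical selectors rather than the real API selector types:

```go
package main

import "fmt"

// selectorMatches reports whether every key/value pair required by a
// controller's selector is present on the pod's labels. Plain maps stand in
// for the real label selector types; illustrative sketch only.
func selectorMatches(selector, podLabels map[string]string) bool {
	for k, v := range selector {
		if podLabels[k] != v {
			return false
		}
	}
	return true
}

// controllersForPod returns the controllers whose selector matches the pod;
// an empty result corresponds to the "No ... found for pod, ... controller
// will avoid syncing" lines above.
func controllersForPod(podLabels map[string]string, selectors map[string]map[string]string) []string {
	var matched []string
	for name, sel := range selectors {
		if selectorMatches(sel, podLabels) {
			matched = append(matched, name)
		}
	}
	return matched
}

func main() {
	// Labels as seen on postgresql-helper-1-cpv6d in the dump above.
	podLabels := map[string]string{
		"deployment":       "postgresql-helper-1",
		"deploymentconfig": "postgresql-helper",
		"name":             "postgresql-helper",
		"app":              "postgresql-ephemeral",
	}
	// Hypothetical selectors: one matching replication controller, no ReplicaSets.
	rcSelectors := map[string]map[string]string{
		"postgresql-helper-1": {"deployment": "postgresql-helper-1"},
	}
	rsSelectors := map[string]map[string]string{}

	fmt.Println("replication controllers:", controllersForPod(podLabels, rcSelectors))
	fmt.Println("replica sets:", controllersForPod(podLabels, rsSelectors))
}
```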
I0125 05:12:06.901413 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-cpv6d" I0125 05:12:06.901753 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-cpv6d, StatefulSet controller will avoid syncing I0125 05:12:06.902179 4678 status_manager.go:425] Status for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.8 StartTime:0xc426a5ce80 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql State:{Waiting: Running:0xc42f642280 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ImageID:docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ContainerID:docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208}]} version:2 podName:postgresql-helper-1-cpv6d podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:06.902391 4678 config.go:281] Setting pods for source api I0125 05:12:06.904027 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:06.906637 4678 audit.go:125] 2017-01-25T05:12:06.906594412-05:00 AUDIT: id="e4bdb01b-9976-4ad7-a958-71dabfd08d15" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:06.907692 4678 audit.go:45] 2017-01-25T05:12:06.907678214-05:00 AUDIT: id="e4bdb01b-9976-4ad7-a958-71dabfd08d15" response="200" I0125 05:12:06.907764 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (3.463545ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:06.908054 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 0 not ready: 1 I0125 05:12:06.910295 4678 audit.go:125] 2017-01-25T05:12:06.910257976-05:00 AUDIT: id="873dc5de-6efa-4ef4-a64e-07425343942c" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:06.912387 4678 audit.go:45] 2017-01-25T05:12:06.912371062-05:00 AUDIT: id="873dc5de-6efa-4ef4-a64e-07425343942c" 
response="200" I0125 05:12:06.912450 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (3.966505ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:06.912957 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:12:06.913085 4678 proxier.go:804] Syncing iptables rules I0125 05:12:06.913097 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:06.929622 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:06.929720 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:06.929737 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:12:06.929830 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:06.929840 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:06.929851 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:06.929861 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:06.930437 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (28.957919ms) I0125 05:12:06.955010 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42e93da20 Mounts:[] Config:0xc431418b40 NetworkSettings:0xc43635e300} I0125 05:12:06.960498 4678 secret.go:179] Setting up volume default-token-0g2nw for pod b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:06.961483 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:06.977667 4678 audit.go:125] 2017-01-25T05:12:06.977591409-05:00 AUDIT: id="b0ad1a96-20ce-43ce-9e95-6cb7ab31fce6" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:12:06.980659 4678 generic.go:342] PLEG: Write status for postgresql-master-1-6jfgj/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-1-6jfgj", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.7", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc4344e9180)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:06.980976 4678 kubelet_pods.go:1029] Generating status for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:06.981388 4678 status_manager.go:312] Ignoring same status for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC 
LastTransitionTime:2017-01-25 05:11:56 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:05 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:11:56 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.2 StartTime:2017-01-25 05:11:56 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running:0xc4307a6e00 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d}]} I0125 05:12:06.981681 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:06.982761 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741"} I0125 05:12:06.982905 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:06.983019 4678 audit.go:45] 2017-01-25T05:12:06.983000009-05:00 AUDIT: id="b0ad1a96-20ce-43ce-9e95-6cb7ab31fce6" response="200" I0125 05:12:06.983235 4678 status_manager.go:312] Ignoring same status for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:11:57 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:05 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:11:57 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.5 StartTime:2017-01-25 05:11:57 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running:0xc4307a7e00 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d}]} I0125 05:12:06.983419 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:06.983629 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (6.485977ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:06.984068 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:12:06.984403 
4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:06.984621 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:07.010993 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:07.051834 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:07.069878 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:07.070704 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:07.072063 4678 audit.go:125] 2017-01-25T05:12:07.072013211-05:00 AUDIT: id="573a8716-2184-49e0-94da-b15f88d88efc" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:12:07.073293 4678 audit.go:125] 2017-01-25T05:12:07.073253015-05:00 AUDIT: id="f91ec171-f0e6-45c1-b4c1-6bc2a013212c" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:12:07.080551 4678 audit.go:45] 2017-01-25T05:12:07.080532259-05:00 AUDIT: id="f91ec171-f0e6-45c1-b4c1-6bc2a013212c" response="200" I0125 05:12:07.080867 4678 audit.go:45] 2017-01-25T05:12:07.080855504-05:00 AUDIT: id="573a8716-2184-49e0-94da-b15f88d88efc" response="200" I0125 05:12:07.081105 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (8.142188ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:07.081174 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (9.543516ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:07.081622 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:12:07.081916 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:07.082077 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume 
"kubernetes.io/secret/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:07.082292 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:12:07.082550 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:07.082723 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:07.104337 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:07.136844 4678 audit.go:125] 2017-01-25T05:12:07.136779492-05:00 AUDIT: id="5045b462-14ec-4dce-8ed0-414002fd8502" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:12:07.142715 4678 audit.go:45] 2017-01-25T05:12:07.142693594-05:00 AUDIT: id="5045b462-14ec-4dce-8ed0-414002fd8502" response="200" I0125 05:12:07.143734 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (7.263849ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:07.144447 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:12:07.146820 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:07.180565 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:07.196970 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:07.199645 4678 audit.go:125] 2017-01-25T05:12:07.199594257-05:00 AUDIT: id="f43fb0d5-ac77-4a2d-a5c9-bb4442ea018f" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:12:07.201640 4678 audit.go:45] 2017-01-25T05:12:07.201622701-05:00 AUDIT: id="f43fb0d5-ac77-4a2d-a5c9-bb4442ea018f" response="200" I0125 05:12:07.201904 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (2.694402ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:07.202335 4678 docker_manager.go:1938] Found pod infra container for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:07.202404 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:07.202426 4678 docker_manager.go:1999] pod 
"postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql" exists as 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208 I0125 05:12:07.202591 4678 docker_manager.go:2086] Got container changes for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017:-1 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208:0]} I0125 05:12:07.214947 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:07.249739 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:07.266952 4678 manager.go:898] Added container: "/system.slice/docker-1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208.scope" (aliases: [k8s_postgresql.f954765b_postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8_b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094_422ec933 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208], namespace: "docker") I0125 05:12:07.267342 4678 handler.go:325] Added event &{/system.slice/docker-1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208.scope 2017-01-25 05:12:06.491881953 -0500 EST containerCreation {}} I0125 05:12:07.267431 4678 container.go:407] Start housekeeping for container "/system.slice/docker-1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208.scope" I0125 05:12:07.284717 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:07.286593 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:07.287759 4678 audit.go:125] 2017-01-25T05:12:07.287701256-05:00 AUDIT: id="be91a825-1b9b-49be-b6e8-33248af510fa" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:12:07.289548 4678 audit.go:125] 2017-01-25T05:12:07.289451939-05:00 AUDIT: id="bc8a8db7-a92e-4db0-8eec-a0ffbb2c0b61" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:12:07.291170 4678 audit.go:45] 2017-01-25T05:12:07.291144636-05:00 AUDIT: id="be91a825-1b9b-49be-b6e8-33248af510fa" response="200" I0125 05:12:07.291709 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (4.38571ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:07.292183 4678 audit.go:45] 2017-01-25T05:12:07.292162351-05:00 AUDIT: id="bc8a8db7-a92e-4db0-8eec-a0ffbb2c0b61" response="200" I0125 05:12:07.292421 4678 docker_manager.go:1938] Found pod infra container for 
"postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:07.292482 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (3.81974ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:07.292508 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:07.292526 4678 docker_manager.go:1999] pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment" exists as 57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d I0125 05:12:07.292701 4678 docker_manager.go:2086] Got container changes for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b:-1 57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d:0]} I0125 05:12:07.292980 4678 docker_manager.go:1938] Found pod infra container for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:07.293027 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:07.293041 4678 docker_manager.go:1999] pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment" exists as 269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d I0125 05:12:07.293100 4678 docker_manager.go:2086] Got container changes for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d:0 764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f:-1]} I0125 05:12:07.316752 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - 
[0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m 
comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:07.316790 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:07.352250 4678 proxier.go:797] syncProxyRules took 439.158341ms I0125 05:12:07.352289 4678 proxier.go:566] OnEndpointsUpdate took 439.267077ms for 6 endpoints I0125 05:12:07.352329 4678 proxier.go:381] Received update notice: [] I0125 05:12:07.352376 4678 proxier.go:804] Syncing iptables rules I0125 05:12:07.352386 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:07.385390 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:07.417418 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:07.449778 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:07.474514 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:07.508363 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:07.552360 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:07.593516 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:07.627544 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:07.657111 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] 
:KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m 
comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:07.657144 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:07.688378 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:07.697760 4678 proxier.go:797] syncProxyRules took 345.376805ms I0125 05:12:07.697801 4678 proxier.go:431] OnServiceUpdate took 345.456317ms for 4 services I0125 05:12:07.697855 4678 proxier.go:804] Syncing iptables rules I0125 05:12:07.697872 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:07.734651 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:07.766958 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:07.799124 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:07.826216 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:07.859160 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:07.891510 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:07.924193 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:07.957898 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:07.994553 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - 
[0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment 
default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:07.994595 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:08.019792 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:08.020098 4678 status_manager.go:312] Ignoring same status for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason: Message:} {Type:Ready Status:False LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason:ContainersNotReady Message:containers with unready status: [postgresql]} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.8 StartTime:2017-01-25 05:12:02 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql State:{Waiting: Running:0xc4352d26c0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ImageID:docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ContainerID:docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208}]} I0125 05:12:08.020263 4678 volume_manager.go:336] 
Waiting for volumes to attach and mount for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:08.058638 4678 secret.go:179] Setting up volume default-token-0g2nw for pod b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:08.059589 4678 audit.go:125] 2017-01-25T05:12:08.059544459-05:00 AUDIT: id="b327627c-9944-4861-80fe-2fdb1bd1fce5" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:12:08.061128 4678 audit.go:45] 2017-01-25T05:12:08.061111365-05:00 AUDIT: id="b327627c-9944-4861-80fe-2fdb1bd1fce5" response="200" I0125 05:12:08.061455 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (2.196818ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:08.061719 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:12:08.061957 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:08.062105 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). 
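The secret-volume entries above (secret.go:179 "Setting up volume", secret.go:206 "Received secret ... (4) pieces of data", atomic_writer.go:142 "no update required for target directory", and the MountVolume.SetUp success) describe an idempotent remount: the kubelet refetches the token secret, compares it with what is already projected on disk, and rewrites nothing when the bytes match. Below is a minimal Go sketch of that compare-then-atomically-replace idea; it is not the kubelet's atomic_writer, and the projectSecret helper, the file mode, and the example directory are assumptions made purely for illustration.

    // Minimal sketch (not the kubelet's atomic_writer) of the idempotent secret
    // projection seen in the log: compare what is already on disk with the fetched
    // secret data, skip the write when nothing changed, otherwise replace the file
    // atomically via a temp file + rename. projectSecret, the 0600 mode and the
    // example directory are illustrative assumptions.
    package main

    import (
        "bytes"
        "fmt"
        "os"
        "path/filepath"
    )

    // projectSecret writes each key of a secret payload into targetDir, but only
    // when the content differs from what is already there ("no update required").
    func projectSecret(targetDir string, data map[string][]byte) error {
        for key, want := range data {
            path := filepath.Join(targetDir, key)
            have, err := os.ReadFile(path)
            if err == nil && bytes.Equal(have, want) {
                continue // same bytes already projected: no update required
            }
            tmp := path + ".tmp"
            if err := os.WriteFile(tmp, want, 0600); err != nil {
                return err
            }
            if err := os.Rename(tmp, path); err != nil { // atomic replace
                return err
            }
        }
        return nil
    }

    func main() {
        dir := filepath.Join(os.TempDir(), "default-token-0g2nw")
        if err := os.MkdirAll(dir, 0700); err != nil {
            fmt.Println(err)
            return
        }
        if err := projectSecret(dir, map[string][]byte{"token": []byte("example")}); err != nil {
            fmt.Println(err)
        }
    }
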
I0125 05:12:08.062764 4678 proxier.go:797] syncProxyRules took 364.907353ms I0125 05:12:08.062796 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-CONTAINER -t nat] I0125 05:12:08.107036 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-CONTAINER] I0125 05:12:08.153976 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-HOST -t nat] I0125 05:12:08.199000 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-HOST] I0125 05:12:08.230423 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-CONTAINER -t nat] I0125 05:12:08.262475 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-CONTAINER] I0125 05:12:08.296441 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-HOST -t nat] I0125 05:12:08.312655 4678 audit.go:125] 2017-01-25T05:12:08.312603614-05:00 AUDIT: id="01d979d0-8059-42b0-bbad-fc91b0b86cb3" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:08.313321 4678 audit.go:45] 2017-01-25T05:12:08.313308181-05:00 AUDIT: id="01d979d0-8059-42b0-bbad-fc91b0b86cb3" response="200" I0125 05:12:08.313835 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.606056ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:08.314465 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:12:08.322402 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:08.323308 4678 audit.go:125] 2017-01-25T05:12:08.323265142-05:00 AUDIT: id="0005cb93-c6a5-4616-9d5b-dcf1701ba2e3" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:12:08.324800 4678 audit.go:45] 2017-01-25T05:12:08.324786148-05:00 AUDIT: id="0005cb93-c6a5-4616-9d5b-dcf1701ba2e3" response="200" I0125 05:12:08.325028 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (2.041607ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:08.325308 4678 docker_manager.go:1938] Found pod infra container for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:08.325376 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:08.325398 4678 docker_manager.go:1999] pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql" exists as 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208 I0125 05:12:08.325556 4678 docker_manager.go:2086] Got container changes for pod 
"postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017:-1 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208:0]} I0125 05:12:08.328342 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-HOST] I0125 05:12:08.365343 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-NON-LOCAL -t filter] I0125 05:12:08.400861 4678 iptables.go:362] running iptables -C [INPUT -t filter -m comment --comment Ensure that non-local NodePort traffic can flow -j KUBE-NODEPORT-NON-LOCAL] I0125 05:12:09.156376 4678 audit.go:125] 2017-01-25T05:12:09.156333289-05:00 AUDIT: id="63c4cc3b-f8a2-4f20-896c-1a282f6bd292" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:12:09.157442 4678 audit.go:45] 2017-01-25T05:12:09.157427289-05:00 AUDIT: id="63c4cc3b-f8a2-4f20-896c-1a282f6bd292" response="200" I0125 05:12:09.157541 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (5.08179ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:09.157859 4678 controller.go:106] Found 0 cronjobs I0125 05:12:09.160050 4678 audit.go:125] 2017-01-25T05:12:09.160016195-05:00 AUDIT: id="b0d93b3b-02bd-4cc2-a90a-54fb44dad3d9" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:12:09.161071 4678 audit.go:45] 2017-01-25T05:12:09.161056852-05:00 AUDIT: id="b0d93b3b-02bd-4cc2-a90a-54fb44dad3d9" response="200" I0125 05:12:09.161151 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (3.019526ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:09.161429 4678 controller.go:114] Found 0 jobs I0125 05:12:09.161441 4678 controller.go:117] Found 0 groups I0125 05:12:09.261430 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:09.261468 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:09.263407 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:09.263429 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:09.263839 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc431b034a0 -1 [] true false map[] 0xc436d71d10 } I0125 05:12:09.263888 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:09.266501 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc430be4940 -1 [] true false map[] 0xc42d5b83c0 } I0125 05:12:09.266545 4678 prober.go:113] Readiness probe for 
"router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:09.419705 4678 worker.go:162] Probe target container not found: postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-master I0125 05:12:09.571372 4678 panics.go:76] GET /api/v1/watch/persistentvolumeclaims?resourceVersion=8550&timeoutSeconds=548: (9m8.005139189s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:pv-binder-controller] 172.18.7.222:50846] I0125 05:12:09.571699 4678 reflector.go:392] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: Watch close - *api.PersistentVolumeClaim total 3 items received I0125 05:12:09.574475 4678 audit.go:125] 2017-01-25T05:12:09.574431004-05:00 AUDIT: id="52ca4198-a37a-41a8-b5e4-44276799d311" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:pv-binder-controller" as="" asgroups="" namespace="" uri="/api/v1/watch/persistentvolumeclaims?resourceVersion=10942&timeoutSeconds=435" I0125 05:12:09.574951 4678 audit.go:45] 2017-01-25T05:12:09.574936748-05:00 AUDIT: id="52ca4198-a37a-41a8-b5e4-44276799d311" response="200" I0125 05:12:09.615361 4678 status_manager.go:190] Container readiness unchanged (false): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" - "docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208" I0125 05:12:09.686737 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094) I0125 05:12:09.686805 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:09.686989 4678 kubelet_pods.go:1029] Generating status for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:09.687188 4678 status_manager.go:312] Ignoring same status for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:22 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:42 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:22 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.3 StartTime:2017-01-25 03:40:22 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:registry State:{Waiting: Running:0xc42fdc9180 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-docker-registry:86a9783 ImageID:docker://sha256:3ec55bd72e2d99d049485e7f0556140392c415053ffba63b99bdeca83d4e5b7f ContainerID:docker://b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b}]} I0125 05:12:09.687345 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:09.712477 4678 secret.go:179] Setting up volume registry-token-vjbst for pod e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/registry-token-vjbst I0125 05:12:09.713509 4678 audit.go:125] 2017-01-25T05:12:09.713468835-05:00 AUDIT: id="a51521ce-4b6e-41fa-ab35-866662e048b5" ip="172.18.7.222" method="GET" 
user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/registry-token-vjbst" I0125 05:12:09.714861 4678 audit.go:45] 2017-01-25T05:12:09.714845517-05:00 AUDIT: id="a51521ce-4b6e-41fa-ab35-866662e048b5" response="200" I0125 05:12:09.715105 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/registry-token-vjbst: (1.927232ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:09.715382 4678 secret.go:206] Received secret default/registry-token-vjbst containing (4) pieces of data, 4113 total bytes I0125 05:12:09.716139 4678 atomic_writer.go:142] pod default/docker-registry-1-xppm3 volume registry-token-vjbst: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/registry-token-vjbst I0125 05:12:09.716317 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094-registry-token-vjbst" (spec.Name: "registry-token-vjbst") pod "e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094" (UID: "e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:09.817552 4678 panics.go:76] GET /api/v1/watch/endpoints?resourceVersion=853: (1h30m39.824148011s) 200 [[openshift-router/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:60456] I0125 05:12:09.821446 4678 audit.go:125] 2017-01-25T05:12:09.821398887-05:00 AUDIT: id="b504b112-ee42-44b7-acf6-6031c1638ed9" ip="172.18.7.222" method="GET" user="system:serviceaccount:default:router" as="" asgroups="" namespace="" uri="/api/v1/watch/endpoints?resourceVersion=11052" I0125 05:12:09.821934 4678 audit.go:45] 2017-01-25T05:12:09.821921336-05:00 AUDIT: id="b504b112-ee42-44b7-acf6-6031c1638ed9" response="200" I0125 05:12:09.987618 4678 volume_manager.go:365] All volumes are attached and mounted for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:09.989763 4678 audit.go:125] 2017-01-25T05:12:09.989719264-05:00 AUDIT: id="e4a3bbc9-2580-48cd-b035-8f13fdd115f7" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/registry-dockercfg-k7v0c" I0125 05:12:09.992081 4678 audit.go:45] 2017-01-25T05:12:09.992062149-05:00 AUDIT: id="e4a3bbc9-2580-48cd-b035-8f13fdd115f7" response="200" I0125 05:12:09.992320 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/registry-dockercfg-k7v0c: (2.85559ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:09.992585 4678 docker_manager.go:1938] Found pod infra container for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:09.992655 4678 docker_manager.go:1951] Pod infra container looks good, keep it "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:09.992674 4678 docker_manager.go:1999] pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" container "registry" exists as b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b I0125 05:12:09.992835 4678 docker_manager.go:2086] Got container changes for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:f3e8c4070605ec0a325ddc790afad13d1f5e410da6899802c43cad4aaaabef59 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] 
ContainersToKeep:map[f3e8c4070605ec0a325ddc790afad13d1f5e410da6899802c43cad4aaaabef59:-1 b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b:0]} I0125 05:12:10.931588 4678 worker.go:162] Probe target container not found: postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-slave I0125 05:12:10.932511 4678 worker.go:162] Probe target container not found: postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-slave I0125 05:12:11.684643 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:11.917821 4678 audit.go:125] 2017-01-25T05:12:11.917787153-05:00 AUDIT: id="b072f9a5-f768-46d1-b316-17b24e5ccf73" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:12:11.919029 4678 audit.go:45] 2017-01-25T05:12:11.919013756-05:00 AUDIT: id="b072f9a5-f768-46d1-b316-17b24e5ccf73" response="200" I0125 05:12:11.919103 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.507502ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:12.109269 4678 worker.go:162] Probe target container not found: postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-master I0125 05:12:12.110163 4678 worker.go:162] Probe target container not found: postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-master I0125 05:12:12.905423 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:12:12.967205 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:12.967236 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:12.967977 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:12.967993 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:12.968519 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Date:[Wed, 25 Jan 2017 10:12:12 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache]] 0xc4329f7360 0 [] true false map[] 0xc43310cb40 } I0125 05:12:12.968567 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:12.969172 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:12:12 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc4329f7480 0 [] true false map[] 0xc42779a780 } I0125 05:12:12.969227 4678 prober.go:113] 
Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:13.315438 4678 audit.go:125] 2017-01-25T05:12:13.315405411-05:00 AUDIT: id="616e2fd0-09a3-4d05-bef7-b9af1de60952" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:13.315822 4678 audit.go:45] 2017-01-25T05:12:13.315813825-05:00 AUDIT: id="616e2fd0-09a3-4d05-bef7-b9af1de60952" response="200" I0125 05:12:13.316116 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (916.917µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:13.684651 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:14.007231 4678 gc_controller.go:175] GC'ing orphaned I0125 05:12:14.007254 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. I0125 05:12:14.700888 4678 kube_docker_client.go:328] Pulling image "centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389": "ca2fb7e20031: Extracting [==================================================>] 39.81 MB/39.81 MB" I0125 05:12:15.033736 4678 kube_docker_client.go:328] Pulling image "centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389": "ca2fb7e20031: Extracting [==================================================>] 39.81 MB/39.81 MB" I0125 05:12:15.196569 4678 audit.go:125] 2017-01-25T05:12:15.196538201-05:00 AUDIT: id="170bec60-7495-4fa2-a9e0-81855e14264a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:12:15.197466 4678 audit.go:45] 2017-01-25T05:12:15.197455469-05:00 AUDIT: id="170bec60-7495-4fa2-a9e0-81855e14264a" response="200" I0125 05:12:15.197526 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.191768ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:16.119051 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:12:16.119070 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:12:16.218845 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:12:17.145609 4678 audit.go:125] 2017-01-25T05:12:17.145574058-05:00 AUDIT: id="e3d38265-efd5-4970-ab48-ba3974afbb13" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:12:17.146005 4678 audit.go:45] 2017-01-25T05:12:17.145994118-05:00 AUDIT: id="e3d38265-efd5-4970-ab48-ba3974afbb13" response="200" I0125 05:12:17.146307 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (931.776µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:17.213216 4678 audit.go:125] 2017-01-25T05:12:17.21317009-05:00 AUDIT: id="158cd3ab-4502-4222-bd6d-ddeb71bafe8a" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:12:17.215453 4678 audit.go:45] 2017-01-25T05:12:17.215434429-05:00 AUDIT: id="158cd3ab-4502-4222-bd6d-ddeb71bafe8a" response="200" I0125 05:12:17.216138 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (3.159636ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:17.216445 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:12:17.435129 4678 panics.go:76] GET /api/v1/watch/pods?fieldSelector=spec.nodeName%3D%2Cstatus.phase%21%3DFailed%2Cstatus.phase%21%3DSucceeded&resourceVersion=10130&timeoutSeconds=431: (7m11.001071941s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:17.435363 4678 reflector.go:392] github.com/openshift/origin/vendor/k8s.io/kubernetes/plugin/pkg/scheduler/factory/factory.go:457: Watch close - *api.Pod total 34 items received I0125 05:12:17.435974 4678 audit.go:125] 2017-01-25T05:12:17.435942231-05:00 AUDIT: id="2137b89d-6bda-4efc-aa93-d0d510f34ae1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/pods?fieldSelector=spec.nodeName%3D%2Cstatus.phase%21%3DFailed%2Cstatus.phase%21%3DSucceeded&resourceVersion=11031&timeoutSeconds=488" I0125 05:12:17.436406 4678 audit.go:45] 2017-01-25T05:12:17.436392622-05:00 AUDIT: id="2137b89d-6bda-4efc-aa93-d0d510f34ae1" response="200" I0125 05:12:18.323837 4678 audit.go:125] 2017-01-25T05:12:18.323771854-05:00 AUDIT: id="2fc4fae6-95fe-4845-b3bf-19e01f84981f" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:18.324551 4678 audit.go:45] 2017-01-25T05:12:18.324538432-05:00 AUDIT: id="2fc4fae6-95fe-4845-b3bf-19e01f84981f" response="200" I0125 05:12:18.325065 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.63172ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:18.325641 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
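Note on the probe entries above: the repeated "HTTP-Probe Host ... Path: /healthz" and "Probe succeeded for http://172.17.0.3:5000/healthz" lines are the kubelet's HTTP liveness/readiness checks against the registry pod. The following Go sketch shows only the core of such a check, under stated assumptions: a plain GET with a timeout, treating any status from 200 up to (but not including) 400 as success, which is the documented success criterion for HTTP probes. The function name probeHTTP and the one-second timeout are illustrative, not taken from the kubelet source.

// Minimal sketch of an HTTP health probe, illustrative only; the real kubelet
// prober also handles custom headers, redirects, and result caching.
package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeHTTP returns nil when GET url answers with a 2xx or 3xx status,
// mirroring the success criterion applied to HTTP liveness/readiness probes.
func probeHTTP(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		return nil
	}
	return fmt.Errorf("probe failed: %s returned %s", url, resp.Status)
}

func main() {
	// Hypothetical endpoint matching the registry health check in the log.
	if err := probeHTTP("http://172.17.0.3:5000/healthz", 1*time.Second); err != nil {
		fmt.Println("probe failed:", err)
		return
	}
	fmt.Println("probe succeeded")
}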
I0125 05:12:19.131291 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:12:19.131355 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:12:19.131374 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:19.131395 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:19.131430 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:12:19.131438 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:19.131445 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:12:19.131501 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:12:19.131527 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:19.131540 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:12:19.131550 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:19.131559 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:12:19.131564 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:12:19.131577 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:12:19.131449 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:19.131653 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:19.131661 
4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:12:19.131666 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:12:19.131669 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:19.131702 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:19.131707 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:12:19.131735 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:12:19.131740 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:19.131752 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:19.131761 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:19.131780 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:12:19.164139 4678 audit.go:125] 2017-01-25T05:12:19.164100788-05:00 AUDIT: id="c44470b6-d722-43df-9d08-a68e1dda7204" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:12:19.165081 4678 audit.go:45] 2017-01-25T05:12:19.165064237-05:00 AUDIT: id="c44470b6-d722-43df-9d08-a68e1dda7204" response="200" I0125 05:12:19.165170 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (3.1458ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:19.165418 4678 controller.go:106] Found 0 cronjobs I0125 05:12:19.166909 4678 audit.go:125] 2017-01-25T05:12:19.16688774-05:00 AUDIT: id="fec341aa-b43c-4f8b-bdfb-d383b3b509fa" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:12:19.167634 4678 audit.go:45] 2017-01-25T05:12:19.167620633-05:00 AUDIT: id="fec341aa-b43c-4f8b-bdfb-d383b3b509fa" response="200" I0125 05:12:19.167695 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.101339ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:19.167874 4678 controller.go:114] Found 0 jobs I0125 05:12:19.167885 4678 controller.go:117] Found 0 groups I0125 05:12:19.260977 4678 prober.go:159] 
HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:19.261000 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:19.261783 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:19.261799 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:19.262150 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Connection:[close] Content-Type:[text/html] Cache-Control:[no-cache]] 0xc436896da0 -1 [] true false map[] 0xc432b28690 } I0125 05:12:19.262213 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:19.262380 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc435b9b700 -1 [] true false map[] 0xc432b28870 } I0125 05:12:19.262413 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:20.931582 4678 worker.go:162] Probe target container not found: postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-slave I0125 05:12:20.932461 4678 worker.go:162] Probe target container not found: postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-slave I0125 05:12:21.624339 4678 panics.go:76] GET /api/v1/watch/nodes?resourceVersion=10172&timeoutSeconds=420: (7m0.00293987s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:gc-controller] 172.18.7.222:50846] I0125 05:12:21.624574 4678 reflector.go:392] pkg/controller/podgc/gc_controller.go:110: Watch close - *api.Node total 41 items received I0125 05:12:21.626896 4678 audit.go:125] 2017-01-25T05:12:21.626857638-05:00 AUDIT: id="94031521-2232-4c59-aa50-950c364b087d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:gc-controller" as="" asgroups="" namespace="" uri="/api/v1/watch/nodes?resourceVersion=11055&timeoutSeconds=369" I0125 05:12:21.627334 4678 audit.go:45] 2017-01-25T05:12:21.627324651-05:00 AUDIT: id="94031521-2232-4c59-aa50-950c364b087d" response="200" I0125 05:12:21.920282 4678 audit.go:125] 2017-01-25T05:12:21.920243755-05:00 AUDIT: id="8b49f077-4d3b-4028-94c5-4a383830f986" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:12:21.921441 4678 audit.go:45] 2017-01-25T05:12:21.921429696-05:00 AUDIT: id="8b49f077-4d3b-4028-94c5-4a383830f986" response="200" I0125 05:12:21.921519 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.491127ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:22.109275 4678 worker.go:162] Probe target container not found: postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-master I0125 05:12:22.110225 4678 worker.go:162] Probe target container not found: postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-master I0125 05:12:22.967199 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:22.967229 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 
05:12:22.967938 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:22.967955 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:22.968510 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:12:22 GMT]] 0xc42ff2da80 0 [] true false map[] 0xc432b292c0 } I0125 05:12:22.968556 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:22.968817 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:12:22 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42ff2dba0 0 [] true false map[] 0xc435068b40 } I0125 05:12:22.968857 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:23.326653 4678 audit.go:125] 2017-01-25T05:12:23.326621481-05:00 AUDIT: id="46c73db6-8a54-47c9-84c5-facf87899d3e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:23.327052 4678 audit.go:45] 2017-01-25T05:12:23.327043436-05:00 AUDIT: id="46c73db6-8a54-47c9-84c5-facf87899d3e" response="200" I0125 05:12:23.327397 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (971.184µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:24.684882 4678 panics.go:76] GET /api/v1/watch/nodes?resourceVersion=10181&timeoutSeconds=379: (6m19.001117599s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:24.685108 4678 reflector.go:392] pkg/controller/informers/factory.go:89: Watch close - *api.Node total 37 items received I0125 05:12:24.685772 4678 audit.go:125] 2017-01-25T05:12:24.685741972-05:00 AUDIT: id="9e78d3b7-23b5-40c8-842a-3d0befbf70ac" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/nodes?resourceVersion=11055&timeoutSeconds=461" I0125 05:12:24.686235 4678 audit.go:45] 2017-01-25T05:12:24.686224677-05:00 AUDIT: id="9e78d3b7-23b5-40c8-842a-3d0befbf70ac" response="200" I0125 05:12:24.700891 4678 kube_docker_client.go:328] Pulling image "centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389": "ca2fb7e20031: Extracting [==================================================>] 39.81 MB/39.81 MB" I0125 05:12:25.007828 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:25.032221 4678 kube_docker_client.go:331] Stop pulling image "centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389": "Status: Downloaded newer image for docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389" I0125 05:12:25.032349 4678 kube_docker_client.go:331] Stop pulling image "centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389": "Status: Image is up to date for docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389" I0125 05:12:25.035557 4678 kubelet_pods.go:107] container: 
extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/postgresql-master podIP: "172.17.0.7" creating hosts mount: true I0125 05:12:25.036548 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11018", FieldPath:"spec.containers{postgresql-master}"}): type: 'Normal' reason: 'Pulled' Successfully pulled image "centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389" I0125 05:12:25.037131 4678 audit.go:125] 2017-01-25T05:12:25.037091793-05:00 AUDIT: id="cd69ae5d-8a6e-4b2c-94ff-f5a3a1c5e372" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:25.038505 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave podIP: "172.17.0.6" creating hosts mount: true I0125 05:12:25.038937 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11002", FieldPath:"spec.containers{postgresql-slave}"}): type: 'Normal' reason: 'Pulled' Successfully pulled image "centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389" I0125 05:12:25.041057 4678 audit.go:45] 2017-01-25T05:12:25.041036521-05:00 AUDIT: id="cd69ae5d-8a6e-4b2c-94ff-f5a3a1c5e372" response="201" I0125 05:12:25.041132 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (4.279728ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:25.042129 4678 audit.go:125] 2017-01-25T05:12:25.042092733-05:00 AUDIT: id="6329aec0-a365-456f-a887-4ef4d063a318" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:25.044340 4678 audit.go:45] 2017-01-25T05:12:25.044325559-05:00 AUDIT: id="6329aec0-a365-456f-a887-4ef4d063a318" response="201" I0125 05:12:25.044405 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (2.56848ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:25.154605 4678 expiration_cache.go:98] Entry f9370ed252a14f73b014c1301a9b6d1b: {key:f9370ed252a14f73b014c1301a9b6d1b obj:{apiVersion:1.24 daemonVersion:0xc42938fac0}} has expired I0125 05:12:25.206676 4678 audit.go:125] 2017-01-25T05:12:25.206634324-05:00 AUDIT: id="52e27e92-d9f0-4ad3-b0fe-3c77b63ee33d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:12:25.207866 4678 audit.go:45] 2017-01-25T05:12:25.20785017-05:00 AUDIT: id="52e27e92-d9f0-4ad3-b0fe-3c77b63ee33d" response="200" I0125 05:12:25.207954 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.579313ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:25.249207 4678 
exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:12:25.249240 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:12:25.249289 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:12:25.250307 4678 audit.go:125] 2017-01-25T05:12:25.250267709-05:00 AUDIT: id="b585d6eb-935f-41f0-aec4-d74b1e049a57" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:12:25.252158 4678 audit.go:45] 2017-01-25T05:12:25.252141252-05:00 AUDIT: id="b585d6eb-935f-41f0-aec4-d74b1e049a57" response="200" I0125 05:12:25.252312 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (2.340181ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:25.253407 4678 audit.go:125] 2017-01-25T05:12:25.25337197-05:00 AUDIT: id="079dc786-bf02-4c35-8ebd-1983730f0878" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/status" I0125 05:12:25.256013 4678 audit.go:45] 2017-01-25T05:12:25.255997499-05:00 AUDIT: id="079dc786-bf02-4c35-8ebd-1983730f0878" response="200" I0125 05:12:25.256109 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/status: (3.010603ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:25.257513 4678 replication_controller.go:378] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11051 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11059 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} ] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:25.257711 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1, replicas 1->1 (need 1), fullyLabeledReplicas 1->1, readyReplicas 0->1, availableReplicas 0->1, sequence No: 2->2 I0125 05:12:25.258054 4678 replica_set.go:320] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11051 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11059 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] 
Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} ] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:25.258161 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-cpv6d, ReplicaSet controller will avoid syncing I0125 05:12:25.258194 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-cpv6d, job controller will avoid syncing I0125 05:12:25.258234 4678 daemoncontroller.go:332] Pod postgresql-helper-1-cpv6d updated. I0125 05:12:25.258268 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-cpv6d, daemon set controller will avoid syncing I0125 05:12:25.258291 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-cpv6d" I0125 05:12:25.258308 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-cpv6d, PodDisruptionBudget controller will avoid syncing. I0125 05:12:25.258313 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-cpv6d" I0125 05:12:25.258603 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-cpv6d, StatefulSet controller will avoid syncing I0125 05:12:25.258761 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d ready condition last transition time 2017-01-25 05:12:25 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:25.258750074 -0500 EST. 
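Note on the "Comparing pod ... ready condition last transition time ... + minReadySeconds 0 with now ..." entries above: the deployment logic only counts a Ready pod as available once it has stayed Ready for at least minReadySeconds. A minimal sketch of that rule follows; isAvailable is an illustrative name, not the upstream helper, and the timestamps in main are taken loosely from the log for demonstration.

// Minimal sketch of the minReadySeconds availability rule, assuming the caller
// already knows when the pod's Ready condition last transitioned to True.
package main

import (
	"fmt"
	"time"
)

// isAvailable reports whether a pod that became Ready at readySince should be
// treated as available at "now", given the controller's minReadySeconds.
func isAvailable(readySince, now time.Time, minReadySeconds int) bool {
	if readySince.IsZero() {
		return false // never became Ready
	}
	return !readySince.Add(time.Duration(minReadySeconds) * time.Second).After(now)
}

func main() {
	est := time.FixedZone("EST", -5*3600)
	readySince := time.Date(2017, 1, 25, 5, 12, 25, 0, est)
	now := readySince.Add(200 * time.Millisecond)
	// With minReadySeconds 0, as in the log, the pod is available immediately.
	fmt.Println(isAvailable(readySince, now, 0)) // true
	// With minReadySeconds 30 it would not count as available yet.
	fmt.Println(isAvailable(readySince, now, 30)) // false
}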
I0125 05:12:25.259355 4678 status_manager.go:425] Status for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935945 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.8 StartTime:0xc426a5ce80 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql State:{Waiting: Running:0xc42dcf9600 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ImageID:docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ContainerID:docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208}]} version:3 podName:postgresql-helper-1-cpv6d podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:25.259568 4678 config.go:281] Setting pods for source api I0125 05:12:25.261114 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:25.262472 4678 audit.go:125] 2017-01-25T05:12:25.262431179-05:00 AUDIT: id="373e3886-fc7e-419a-bc10-ddb4c530c89b" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:12:25.267433 4678 audit.go:125] 2017-01-25T05:12:25.267386425-05:00 AUDIT: id="15007b3e-6b38-47c3-8fc0-c23536ca2afd" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status" I0125 05:12:25.268654 4678 audit.go:125] 2017-01-25T05:12:25.268619559-05:00 AUDIT: id="70462794-dac1-401b-bf8d-6e87b12aca7b" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:25.272019 4678 audit.go:45] 2017-01-25T05:12:25.272003392-05:00 AUDIT: id="70462794-dac1-401b-bf8d-6e87b12aca7b" response="200" I0125 05:12:25.272096 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (7.807642ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:25.272398 4678 audit.go:45] 2017-01-25T05:12:25.272384916-05:00 AUDIT: id="15007b3e-6b38-47c3-8fc0-c23536ca2afd" response="200" I0125 05:12:25.273574 4678 panics.go:76] PUT 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status: (9.876163ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:25.274086 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 1 not ready: 0 I0125 05:12:25.274443 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (16.776463ms) I0125 05:12:25.274613 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. Desired pod count change: 1->1 I0125 05:12:25.274707 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (43.183µs) I0125 05:12:25.278741 4678 audit.go:125] 2017-01-25T05:12:25.278694365-05:00 AUDIT: id="5ee96cd3-f4e6-43ad-b1f6-4fd9ee73989d" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:25.280988 4678 audit.go:45] 2017-01-25T05:12:25.280972786-05:00 AUDIT: id="5ee96cd3-f4e6-43ad-b1f6-4fd9ee73989d" response="200" I0125 05:12:25.281052 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (6.047425ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:25.281508 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:12:25.281574 4678 roundrobin.go:257] LoadBalancerRR: Setting endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql to [172.17.0.8:5432] I0125 05:12:25.281595 4678 roundrobin.go:83] LoadBalancerRR service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql" did not exist, created I0125 05:12:25.281663 4678 proxier.go:616] Setting endpoints for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql" to [172.17.0.8:5432] I0125 05:12:25.281703 4678 proxier.go:804] Syncing iptables rules I0125 05:12:25.281713 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:25.298757 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. 
(40.377534ms) I0125 05:12:25.300352 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:25.300458 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:25.300503 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:25.300515 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:25.300525 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:12:25.300536 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:12:25.300612 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:25.300623 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:25.302272 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. I0125 05:12:25.306538 4678 audit.go:45] 2017-01-25T05:12:25.306515642-05:00 AUDIT: id="373e3886-fc7e-419a-bc10-ddb4c530c89b" response="200" I0125 05:12:25.306755 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (44.592178ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:25.307364 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 2) I0125 05:12:25.307505 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d ready condition last transition time 2017-01-25 05:12:25 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:25.307492689 -0500 EST. 
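Note on the "LoadBalancerRR: Setting endpoints for ... postgresql-helper:postgresql to [172.17.0.8:5432]" entries above: the userspace proxy keeps a per-service endpoint list and hands out addresses round-robin. The sketch below shows only that selection idea, assuming a fixed endpoint list and ignoring the session affinity and locking the real balancer handles; rrBalancer and pick are illustrative names.

// Minimal sketch of round-robin endpoint selection over a fixed list.
package main

import "fmt"

// rrBalancer cycles through its endpoint list, one address per call.
type rrBalancer struct {
	endpoints []string
	next      int
}

// pick returns the next endpoint in round-robin order, or "" when none are set.
func (b *rrBalancer) pick() string {
	if len(b.endpoints) == 0 {
		return ""
	}
	ep := b.endpoints[b.next%len(b.endpoints)]
	b.next++
	return ep
}

func main() {
	// Endpoints as the log reports them for the postgresql-helper service.
	b := &rrBalancer{endpoints: []string{"172.17.0.8:5432"}}
	for i := 0; i < 3; i++ {
		fmt.Println(b.pick()) // with a single endpoint every pick returns it
	}
}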
I0125 05:12:25.308897 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:12:25.309656 4678 audit.go:125] 2017-01-25T05:12:25.30961545-05:00 AUDIT: id="7e089d4d-9d39-4234-8eda-53e34d8316c0" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:12:25.310664 4678 audit.go:45] 2017-01-25T05:12:25.310650337-05:00 AUDIT: id="7e089d4d-9d39-4234-8eda-53e34d8316c0" response="409" I0125 05:12:25.310748 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (1.462238ms) 409 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:25.311095 4678 controller.go:294] Cannot update the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper": Operation cannot be fulfilled on deploymentconfigs "postgresql-helper": the object has been modified; please apply your changes to the latest version and try again I0125 05:12:25.311112 4678 controller.go:393] Error syncing deployment config extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper: Operation cannot be fulfilled on deploymentconfigs "postgresql-helper": the object has been modified; please apply your changes to the latest version and try again I0125 05:12:25.311262 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d ready condition last transition time 2017-01-25 05:12:25 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:25.311250101 -0500 EST. I0125 05:12:25.312141 4678 audit.go:125] 2017-01-25T05:12:25.312094735-05:00 AUDIT: id="d4f4aad9-19d2-47f5-b02a-a3ef58afe3f4" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:12:25.314962 4678 audit.go:45] 2017-01-25T05:12:25.314948212-05:00 AUDIT: id="d4f4aad9-19d2-47f5-b02a-a3ef58afe3f4" response="200" I0125 05:12:25.315091 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (3.211225ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:25.315515 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 2) I0125 05:12:25.316283 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:12:25.316421 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d ready condition last transition time 2017-01-25 05:12:25 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:25.316410396 -0500 EST. I0125 05:12:25.316953 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d ready condition last transition time 2017-01-25 05:12:25 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:25.316944683 -0500 EST. 
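Note on the 409 above ("Operation cannot be fulfilled on deploymentconfigs \"postgresql-helper\": the object has been modified...") followed by an immediately successful PUT: this is the usual optimistic-concurrency pattern, where the writer re-reads the latest object and reapplies its change before retrying. A minimal, self-contained sketch of that retry loop follows; errConflict and updateWithRetry are illustrative stand-ins, not the client library's helpers, and the string "object" stands in for a real API resource.

// Minimal sketch of retry-on-conflict for an optimistic-concurrency update.
package main

import (
	"errors"
	"fmt"
)

var errConflict = errors.New("conflict: object has been modified")

// updateWithRetry re-fetches the object and reapplies mutate until the update
// succeeds or a non-conflict error occurs, up to maxRetries attempts.
func updateWithRetry(get func() (string, error), update func(obj string) error, mutate func(obj string) string, maxRetries int) error {
	var lastErr error
	for i := 0; i < maxRetries; i++ {
		obj, err := get()
		if err != nil {
			return err
		}
		lastErr = update(mutate(obj))
		if lastErr == nil {
			return nil
		}
		if !errors.Is(lastErr, errConflict) {
			return lastErr
		}
		// Conflict (HTTP 409): another writer stored a newer version; re-read and retry.
	}
	return lastErr
}

func main() {
	stored := "generation-1"
	attempts := 0
	get := func() (string, error) { return stored, nil }
	update := func(obj string) error {
		attempts++
		if attempts == 1 {
			return errConflict // first write races with another update, as in the log
		}
		stored = obj
		return nil
	}
	mutate := func(obj string) string { return obj + "+status" }
	fmt.Println(updateWithRetry(get, update, mutate, 3), stored)
}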
I0125 05:12:25.326109 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:25.348981 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave: setting entrypoint "[]" and command "[run-postgresql-slave]" I0125 05:12:25.359582 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/postgresql-master: setting entrypoint "[]" and command "[run-postgresql-master]" I0125 05:12:25.370082 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:25.403318 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:25.415417 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:12:25.415466 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:12:25.424650 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:25.444023 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:25.470612 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:25.487482 4678 panics.go:76] GET /api/v1/watch/namespaces?resourceVersion=9428&timeoutSeconds=547: (9m7.011457359s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:25.490705 4678 reflector.go:392] pkg/controller/informers/factory.go:89: Watch close - *api.Namespace total 32 items received I0125 05:12:25.491729 4678 audit.go:125] 2017-01-25T05:12:25.49169007-05:00 AUDIT: id="f7ab0178-db64-454f-91d6-50d5a5b7938e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/namespaces?resourceVersion=10903&timeoutSeconds=345" I0125 05:12:25.492501 4678 audit.go:45] 2017-01-25T05:12:25.492487756-05:00 AUDIT: id="f7ab0178-db64-454f-91d6-50d5a5b7938e" response="200" I0125 05:12:25.500225 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:25.527554 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:25.555601 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment 
--comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A 
KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:25.555645 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:25.579580 4678 proxier.go:797] syncProxyRules took 297.87015ms I0125 05:12:25.579608 4678 proxier.go:566] OnEndpointsUpdate took 297.992913ms for 6 endpoints I0125 05:12:25.579665 4678 proxier.go:381] Received update notice: [] I0125 05:12:25.579709 4678 proxier.go:804] Syncing iptables rules I0125 05:12:25.579720 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:25.608132 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:25.629378 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:25.647920 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:25.668067 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:25.684673 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:25.688625 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:25.709163 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:25.727854 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:25.744849 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-helper-1%2Cdeploymentconfig%3Dpostgresql-helper%2Cname%3Dpostgresql-helper&resourceVersion=11037&timeoutSeconds=401: (21.930537709s) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.5:44926] I0125 05:12:25.746909 
4678 audit.go:125] 2017-01-25T05:12:25.746848188-05:00 AUDIT: id="02e9061a-8a2d-4e34-9837-90bcb47041f5" ip="172.17.0.5" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events?fieldSelector=involvedObject.kind%3DReplicationController%2CinvolvedObject.name%3Dpostgresql-helper-1%2CinvolvedObject.namespace%3Dextended-test-postgresql-replication-1-34bbd-xd4g8%2CinvolvedObject.uid%3Db3de835f-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:12:25.749398 4678 audit.go:45] 2017-01-25T05:12:25.749378948-05:00 AUDIT: id="02e9061a-8a2d-4e34-9837-90bcb47041f5" response="200" I0125 05:12:25.749549 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events?fieldSelector=involvedObject.kind%3DReplicationController%2CinvolvedObject.name%3Dpostgresql-helper-1%2CinvolvedObject.namespace%3Dextended-test-postgresql-replication-1-34bbd-xd4g8%2CinvolvedObject.uid%3Db3de835f-e2e6-11e6-a4b0-0e6a5cbf0094: (5.518998ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.5:44926] I0125 05:12:25.755875 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:25.789446 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A 
KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A 
KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:25.789499 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:25.828000 4678 proxier.go:797] syncProxyRules took 248.281143ms I0125 05:12:25.828034 4678 proxier.go:431] OnServiceUpdate took 248.351118ms for 4 services I0125 05:12:26.005615 4678 generic.go:145] GenericPLEG: b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d: running -> exited I0125 05:12:26.236435 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:12:26.236453 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:12:26.325572 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:12:26.613041 4678 panics.go:76] GET /oapi/v1/watch/imagestreams?resourceVersion=10086&timeoutSeconds=361: (6m1.001222062s) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:26.613383 4678 reflector.go:392] github.com/openshift/origin/pkg/image/controller/factory.go:40: Watch close - *api.ImageStream total 5 items received I0125 05:12:26.614082 4678 audit.go:125] 2017-01-25T05:12:26.614046719-05:00 AUDIT: id="f8fddb05-5d9b-4175-aa90-dec1f826ac53" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/watch/imagestreams?resourceVersion=10739&timeoutSeconds=401" I0125 05:12:26.614568 4678 audit.go:45] 2017-01-25T05:12:26.614553408-05:00 AUDIT: id="f8fddb05-5d9b-4175-aa90-dec1f826ac53" response="200" I0125 05:12:26.770852 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc4357eda20 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/ce4eccb1 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc437f32ea0 NetworkSettings:0xc42dd36500} I0125 05:12:26.894274 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11002", FieldPath:"spec.containers{postgresql-slave}"}): type: 'Normal' reason: 'Created' Created container with docker id 1122e1bd8a66; Security:[seccomp=unconfined] I0125 05:12:26.895164 4678 audit.go:125] 2017-01-25T05:12:26.895118256-05:00 AUDIT: id="498dd7e1-e62c-44b8-ad5d-25923adad95f" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:26.899416 4678 audit.go:45] 2017-01-25T05:12:26.899385261-05:00 AUDIT: id="498dd7e1-e62c-44b8-ad5d-25923adad95f" response="201" I0125 05:12:26.899478 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (4.652797ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:27.090249 4678 panics.go:76] GET /apis/storage.k8s.io/v1beta1/watch/storageclasses?resourceVersion=4&timeoutSeconds=464: (7m44.000939466s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:27.090536 4678 reflector.go:392] github.com/openshift/origin/vendor/k8s.io/kubernetes/plugin/pkg/admission/storageclass/default/admission.go:75: Watch close - *storage.StorageClass total 0 items received I0125 05:12:27.091164 4678 audit.go:125] 2017-01-25T05:12:27.091129319-05:00 AUDIT: id="c23d2cb8-d67c-4240-863e-c40d95dc001b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/storage.k8s.io/v1beta1/watch/storageclasses?resourceVersion=4&timeoutSeconds=382" I0125 05:12:27.091533 4678 audit.go:45] 2017-01-25T05:12:27.091523494-05:00 AUDIT: id="c23d2cb8-d67c-4240-863e-c40d95dc001b" response="200" I0125 05:12:27.217322 4678 audit.go:125] 2017-01-25T05:12:27.217287602-05:00 AUDIT: id="def277cc-4492-4570-83a7-8eaea70d2836" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:12:27.217740 4678 audit.go:45] 2017-01-25T05:12:27.217730585-05:00 AUDIT: id="def277cc-4492-4570-83a7-8eaea70d2836" response="200" I0125 05:12:27.218043 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (966.874µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:27.284853 4678 audit.go:125] 2017-01-25T05:12:27.284822961-05:00 AUDIT: id="de9a2e9f-c1ea-4085-84b0-98fd0fed22b5" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:12:27.287311 4678 audit.go:45] 2017-01-25T05:12:27.287293083-05:00 AUDIT: id="de9a2e9f-c1ea-4085-84b0-98fd0fed22b5" response="200" I0125 05:12:27.287822 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (3.198136ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:27.288360 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:12:27.641613 4678 panics.go:76] GET /api/v1/watch/persistentvolumes?resourceVersion=8553&timeoutSeconds=435: (7m15.001176936s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:27.641840 4678 reflector.go:392] pkg/controller/informers/factory.go:89: Watch close - *api.PersistentVolume total 4 items received I0125 05:12:27.642436 4678 audit.go:125] 2017-01-25T05:12:27.642404483-05:00 AUDIT: id="dc9d94b5-aefc-4f38-9a8f-acce02515bb4" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/persistentvolumes?resourceVersion=10938&timeoutSeconds=580" I0125 05:12:27.642892 4678 audit.go:45] 2017-01-25T05:12:27.642879249-05:00 AUDIT: id="dc9d94b5-aefc-4f38-9a8f-acce02515bb4" response="200" I0125 05:12:27.684644 4678 kubelet.go:1858] SyncLoop 
(housekeeping) I0125 05:12:27.706949 4678 helpers.go:101] Unable to get network stats from pid 10444: couldn't read network stats: failure opening /proc/10444/net/dev: open /proc/10444/net/dev: no such file or directory I0125 05:12:28.019518 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc429668c60 Mounts:[] Config:0xc42fc27440 NetworkSettings:0xc42e262700} I0125 05:12:28.022967 4678 generic.go:342] PLEG: Write status for postgresql-helper-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-helper-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.5", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42d8582a0), (*container.ContainerStatus)(0xc42d6b80e0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:28.024218 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11018", FieldPath:"spec.containers{postgresql-master}"}): type: 'Normal' reason: 'Created' Created container with docker id 0b1e46b84aab; Security:[seccomp=unconfined] I0125 05:12:28.024830 4678 audit.go:125] 2017-01-25T05:12:28.024786079-05:00 AUDIT: id="de55ad8b-dde1-4c0c-b6b8-96a9428d2425" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:28.027689 4678 audit.go:45] 2017-01-25T05:12:28.027669472-05:00 AUDIT: id="de55ad8b-dde1-4c0c-b6b8-96a9428d2425" response="201" I0125 05:12:28.027757 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.24627ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.043763 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11002", FieldPath:"spec.containers{postgresql-slave}"}): type: 'Normal' reason: 'Started' Started container with docker id 1122e1bd8a66 I0125 05:12:28.044501 4678 audit.go:125] 2017-01-25T05:12:28.044455882-05:00 AUDIT: id="912d65f2-badb-4ca5-a40d-c92b2af682f4" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:28.046574 4678 audit.go:45] 2017-01-25T05:12:28.046563714-05:00 AUDIT: id="912d65f2-badb-4ca5-a40d-c92b2af682f4" response="201" I0125 05:12:28.046627 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (2.475567ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.089263 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48.scope" E0125 05:12:28.099300 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" 
container "postgresql-slave": symlink /var/log/containers/postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-slave-1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48.log: no such file or directory I0125 05:12:28.263391 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:33315" with type 1 I0125 05:12:28.263468 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:28.263504 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:28.279966 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:45682" with type 28 I0125 05:12:28.280026 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:28.280058 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:28.280245 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:45682" with type 1 I0125 05:12:28.280261 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:28.280281 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:28.300879 4678 manager.go:898] Added container: "/system.slice/docker-1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48.scope" (aliases: [k8s_postgresql-slave.db39a3b3_postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8_b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094_089e472a 1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48], namespace: "docker") I0125 05:12:28.301037 4678 handler.go:325] Added event &{/system.slice/docker-1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48.scope 2017-01-25 05:12:27.010918968 -0500 EST containerCreation {}} I0125 05:12:28.301142 4678 container.go:407] Start housekeeping for container "/system.slice/docker-1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48.scope" I0125 05:12:28.333597 4678 audit.go:125] 2017-01-25T05:12:28.333550131-05:00 AUDIT: id="9f27aa01-7e4a-4586-b4f7-a37be19de94b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:28.334133 4678 audit.go:45] 2017-01-25T05:12:28.334119708-05:00 AUDIT: id="9f27aa01-7e4a-4586-b4f7-a37be19de94b" response="200" I0125 05:12:28.334579 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.369902ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:28.335030 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
I0125 05:12:28.335703 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11018", FieldPath:"spec.containers{postgresql-master}"}): type: 'Normal' reason: 'Started' Started container with docker id 0b1e46b84aab I0125 05:12:28.336288 4678 audit.go:125] 2017-01-25T05:12:28.33625215-05:00 AUDIT: id="28f09996-ed0c-483d-b1df-b6f765754ae6" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:28.338811 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f.scope" E0125 05:12:28.339557 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql-master": symlink /var/log/containers/postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-master-0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f.log: no such file or directory I0125 05:12:28.340765 4678 audit.go:45] 2017-01-25T05:12:28.340750794-05:00 AUDIT: id="28f09996-ed0c-483d-b1df-b6f765754ae6" response="201" I0125 05:12:28.340833 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (4.856667ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.340842 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d"} I0125 05:12:28.340906 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.340943 4678 helpers.go:78] Already ran container "deployment" of pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)", do nothing I0125 05:12:28.341086 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.341703 4678 audit.go:125] 2017-01-25T05:12:28.341668662-05:00 AUDIT: id="3507f249-e0b5-48d1-9198-3533211424dd" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy" I0125 05:12:28.343001 4678 audit.go:45] 2017-01-25T05:12:28.342983225-05:00 AUDIT: id="3507f249-e0b5-48d1-9198-3533211424dd" response="200" I0125 05:12:28.343097 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy: (1.688295ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.344080 4678 audit.go:125] 
2017-01-25T05:12:28.344047798-05:00 AUDIT: id="cb36aba7-ee8a-4ba3-b397-a2fc916d4628" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy/status" I0125 05:12:28.348136 4678 audit.go:45] 2017-01-25T05:12:28.348120756-05:00 AUDIT: id="cb36aba7-ee8a-4ba3-b397-a2fc916d4628" response="200" I0125 05:12:28.348234 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy/status: (4.407262ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.348599 4678 config.go:281] Setting pods for source api I0125 05:12:28.348970 4678 replication_controller.go:378] Pod postgresql-helper-1-deploy updated, objectMeta {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11045 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11069 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-helper-1] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:12:28.349096 4678 replication_controller.go:255] No controllers found for pod postgresql-helper-1-deploy, replication manager will avoid syncing I0125 05:12:28.349118 4678 replica_set.go:320] Pod postgresql-helper-1-deploy updated, objectMeta {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11045 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11069 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:28.349188 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-deploy, ReplicaSet controller will avoid syncing I0125 05:12:28.349239 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-deploy, job controller will avoid syncing I0125 05:12:28.349263 4678 daemoncontroller.go:332] Pod postgresql-helper-1-deploy updated. I0125 05:12:28.349290 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-deploy, daemon set controller will avoid syncing I0125 05:12:28.349308 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-deploy" I0125 05:12:28.349322 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-deploy, PodDisruptionBudget controller will avoid syncing. 
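Aside: the burst of "Pod postgresql-helper-1-deploy updated" / "No X found for pod ..., ... will avoid syncing" lines is each controller's pod-update handler looking for an owning object by label selector and bailing out when nothing matches (the deployer pod carries only the openshift.io/deployer-pod-for.name label, which no ReplicationController, ReplicaSet, Job, DaemonSet, PDB, or StatefulSet selects). The following is a minimal, self-contained sketch of that selector check, with invented controllers; the real controllers use informer caches and labels.Selector rather than plain maps.

```go
// selector_sketch.go — minimal illustration of the "No controllers found for
// pod ..., will avoid syncing" decision: match the pod's labels against each
// controller's selector and only enqueue a sync when something matches.
package main

import "fmt"

type controllerRef struct {
	kind     string
	name     string
	selector map[string]string // equality-based selector, the simplest case
}

// matches reports whether every selector key/value appears in the pod labels.
func matches(selector, podLabels map[string]string) bool {
	for k, v := range selector {
		if podLabels[k] != v {
			return false
		}
	}
	return len(selector) > 0
}

func main() {
	// Labels taken from the deployer pod in the log above.
	podLabels := map[string]string{"openshift.io/deployer-pod-for.name": "postgresql-helper-1"}

	// Hypothetical controllers in the namespace; none selects deployer pods,
	// which is why every controller logs "... will avoid syncing".
	controllers := []controllerRef{
		{"ReplicationController", "postgresql-helper-1", map[string]string{"deployment": "postgresql-helper-1"}},
		{"DaemonSet", "node-agent", map[string]string{"app": "node-agent"}},
	}

	adopted := false
	for _, c := range controllers {
		if matches(c.selector, podLabels) {
			fmt.Printf("%s %q selects the pod; enqueue for sync\n", c.kind, c.name)
			adopted = true
		}
	}
	if !adopted {
		fmt.Println("No controllers found for pod postgresql-helper-1-deploy, will avoid syncing")
	}
}
```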
I0125 05:12:28.349327 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-deploy" I0125 05:12:28.349397 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-deploy, StatefulSet controller will avoid syncing I0125 05:12:28.350127 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.350276 4678 status_manager.go:425] Status for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Succeeded Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935917 nsec:0 loc:0xa2479e0}} Reason:PodCompleted Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935948 nsec:0 loc:0xa2479e0}} Reason:PodCompleted Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935917 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.5 StartTime:0xc4300476a0 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running: Terminated:0xc430bc8d90} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d}]} version:3 podName:postgresql-helper-1-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:28.352658 4678 audit.go:125] 2017-01-25T05:12:28.352620256-05:00 AUDIT: id="c2f62b6d-6080-4605-8855-d0d5aa17ee7c" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:12:28.356454 4678 audit.go:45] 2017-01-25T05:12:28.356437549-05:00 AUDIT: id="c2f62b6d-6080-4605-8855-d0d5aa17ee7c" response="200" I0125 05:12:28.357916 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (7.302587ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:12:28.358307 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. I0125 05:12:28.358447 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d ready condition last transition time 2017-01-25 05:12:25 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:28.358433912 -0500 EST. I0125 05:12:28.359669 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 status from Running to Complete (scale: 1) I0125 05:12:28.360054 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. 
Desired pod count change: 1->1 I0125 05:12:28.360122 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (34.497µs) I0125 05:12:28.363317 4678 audit.go:125] 2017-01-25T05:12:28.363277237-05:00 AUDIT: id="5b8033f8-3c8a-452a-b031-229f84fbc83e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints" I0125 05:12:28.363820 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-helper&resourceVersion=10980: (31.414184556s) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:28.365597 4678 audit.go:45] 2017-01-25T05:12:28.365580859-05:00 AUDIT: id="5b8033f8-3c8a-452a-b031-229f84fbc83e" response="200" I0125 05:12:28.365700 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints: (2.702483ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:12:28.366542 4678 audit.go:125] 2017-01-25T05:12:28.366502518-05:00 AUDIT: id="02bcc815-fd4d-4315-8c9a-f22b3f46e92d" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy" I0125 05:12:28.369704 4678 replication_controller.go:378] Pod postgresql-helper-1-deploy updated, objectMeta {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11069 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-helper-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11071 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp:2017-01-25 05:12:28.36790019 -0500 EST DeletionGracePeriodSeconds:0xc431ad2f88 Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-helper-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:28.369792 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:12:28.36790019 -0500 EST, labels map[openshift.io/deployer-pod-for.name:postgresql-helper-1]. 
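Aside: the long-running GETs in this trace (31.4s, 6m1s, 7m44s, all returning 200) are watch requests: the server holds the connection open and streams change notifications until timeoutSeconds expires, at which point the client re-issues the watch from the last resourceVersion. The sketch below shows the client side of such a request using only the standard library; the token is a placeholder, the namespace and TLS handling are simplified, and the assumption that events arrive as newline-delimited JSON reflects the usual application/json watch framing.

```go
// watch_sketch.go — rough client-side view of the long-lived watch GETs in the
// log (e.g. .../watch/replicationcontrollers?...&resourceVersion=10980).
package main

import (
	"bufio"
	"crypto/tls"
	"encoding/json"
	"fmt"
	"net/http"
)

// watchEvent mirrors the wire format of a single watch notification.
type watchEvent struct {
	Type   string          `json:"type"`   // ADDED, MODIFIED, DELETED, ERROR
	Object json.RawMessage `json:"object"` // the full object at that revision
}

func main() {
	const (
		apiServer = "https://172.18.7.222:8443"         // master URL seen in the log; substitute your own
		token     = "REPLACE_WITH_SERVICEACCOUNT_TOKEN" // placeholder
		path      = "/api/v1/watch/namespaces/demo/replicationcontrollers" +
			"?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-helper" +
			"&resourceVersion=0&timeoutSeconds=60"
	)

	client := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // demo only; use the cluster CA in practice
	}}
	req, err := http.NewRequest("GET", apiServer+path, nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := client.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// One JSON event per line; the request only returns once timeoutSeconds
	// elapses, which is why the log records 200s after 31s, 6m1s, 7m44s, etc.
	scanner := bufio.NewScanner(resp.Body)
	scanner.Buffer(make([]byte, 0, 64*1024), 4*1024*1024) // events can be large
	for scanner.Scan() {
		var ev watchEvent
		if err := json.Unmarshal(scanner.Bytes(), &ev); err != nil {
			fmt.Println("skipping malformed event:", err)
			continue
		}
		fmt.Println("watch event:", ev.Type, len(ev.Object), "bytes")
	}
}
```

In the real components this loop lives inside the reflector, which is also what produces the "Watch close - *api.ImageStream total 5 items received" style lines earlier in the trace when a watch expires and is restarted.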
I0125 05:12:28.369858 4678 replication_controller.go:255] No controllers found for pod postgresql-helper-1-deploy, replication manager will avoid syncing I0125 05:12:28.369875 4678 replica_set.go:320] Pod postgresql-helper-1-deploy updated, objectMeta {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11069 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-helper-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy UID:b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11071 Generation:0 CreationTimestamp:2017-01-25 05:11:57.011727446 -0500 EST DeletionTimestamp:2017-01-25 05:12:28.36790019 -0500 EST DeletionGracePeriodSeconds:0xc431ad2f88 Labels:map[openshift.io/deployer-pod-for.name:postgresql-helper-1] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:28.369922 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:12:28.36790019 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-helper-1-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy", UID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11071", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935917, nsec:11727446, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc435da27e0), DeletionGracePeriodSeconds:(*int64)(0xc431ad2f88), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-helper-1"}, Annotations:map[string]string{"openshift.io/deployment.name":"postgresql-helper-1", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc4340f60f0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), 
Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-helper-1", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc4340f61e0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc431ad3030), ActiveDeadlineSeconds:(*int64)(0xc431ad3038), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc431a93880), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Succeeded", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", 
LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935917, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935948, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935917, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.5", StartTime:(*unversioned.Time)(0xc435da2a80), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"deployment", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc42e3aab60)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"openshift/origin-deployer:86a9783", ImageID:"docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8", ContainerID:"docker://57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d"}}}}. I0125 05:12:28.370224 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-deploy, ReplicaSet controller will avoid syncing I0125 05:12:28.370251 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-deploy, job controller will avoid syncing I0125 05:12:28.370263 4678 daemoncontroller.go:332] Pod postgresql-helper-1-deploy updated. I0125 05:12:28.370285 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-deploy, daemon set controller will avoid syncing I0125 05:12:28.370300 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-deploy" I0125 05:12:28.370313 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-deploy, PodDisruptionBudget controller will avoid syncing. 
I0125 05:12:28.370319 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-deploy" I0125 05:12:28.370377 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-deploy, StatefulSet controller will avoid syncing I0125 05:12:28.370683 4678 config.go:281] Setting pods for source api I0125 05:12:28.372225 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.373657 4678 audit.go:125] 2017-01-25T05:12:28.373619255-05:00 AUDIT: id="b6c6edd8-cd3d-4ec2-823b-68f484c53fa2" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy" I0125 05:12:28.377868 4678 audit.go:45] 2017-01-25T05:12:28.377848267-05:00 AUDIT: id="02bcc815-fd4d-4315-8c9a-f22b3f46e92d" response="200" I0125 05:12:28.377962 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy: (17.519986ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:12:28.378221 4678 audit.go:45] 2017-01-25T05:12:28.37819296-05:00 AUDIT: id="b6c6edd8-cd3d-4ec2-823b-68f484c53fa2" response="200" I0125 05:12:28.378285 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy: (4.936091ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.378833 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.deletePod)-fm, timestamp 2017-01-25 05:12:28.36790019 -0500 EST, labels map[openshift.io/deployer-pod-for.name:postgresql-helper-1]. 
I0125 05:12:28.378907 4678 replication_controller.go:255] No controllers found for pod postgresql-helper-1-deploy, replication manager will avoid syncing I0125 05:12:28.378924 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.deletePod)-fm, timestamp 2017-01-25 05:12:28.36790019 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-helper-1-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy", UID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11072", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935917, nsec:11727446, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc4318c1120), DeletionGracePeriodSeconds:(*int64)(0xc42dcc0748), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-helper-1"}, Annotations:map[string]string{"openshift.io/deployment.name":"postgresql-helper-1", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc4330fa510), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN 
CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-helper-1", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc4330fa660), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc42dcc0800), ActiveDeadlineSeconds:(*int64)(0xc42dcc0808), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc433e4a400), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Succeeded", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935917, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935948, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935917, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.5", StartTime:(*unversioned.Time)(0xc4318c13e0), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"deployment", 
State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc42f472930)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"openshift/origin-deployer:86a9783", ImageID:"docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8", ContainerID:"docker://57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d"}}}}. I0125 05:12:28.379236 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-deploy, ReplicaSet controller will avoid syncing I0125 05:12:28.379267 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-deploy, job controller will avoid syncing I0125 05:12:28.379280 4678 daemoncontroller.go:367] Pod postgresql-helper-1-deploy deleted. I0125 05:12:28.379301 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-deploy, daemon set controller will avoid syncing I0125 05:12:28.379313 4678 disruption.go:355] deletePod called on pod "postgresql-helper-1-deploy" I0125 05:12:28.379325 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:12:28.379331 4678 disruption.go:358] No matching pdb for pod "postgresql-helper-1-deploy" I0125 05:12:28.379378 4678 pet_set.go:210] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.(*StatefulSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.deletePod)-fm. 
I0125 05:12:28.379397 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-deploy, StatefulSet controller will avoid syncing I0125 05:12:28.380088 4678 config.go:281] Setting pods for source api I0125 05:12:28.381479 4678 kubelet.go:1791] SyncLoop (REMOVE, "api"): "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.381539 4678 kubelet_pods.go:785] Killing unwanted pod "postgresql-helper-1-deploy" I0125 05:12:28.382287 4678 audit.go:125] 2017-01-25T05:12:28.382249018-05:00 AUDIT: id="0a8dd0eb-71de-43b3-b094-e2aa360552a2" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy/status" I0125 05:12:28.383616 4678 audit.go:45] 2017-01-25T05:12:28.383602122-05:00 AUDIT: id="0a8dd0eb-71de-43b3-b094-e2aa360552a2" response="409" I0125 05:12:28.383673 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-deploy/status: (1.723334ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] W0125 05:12:28.383939 4678 status_manager.go:451] Failed to update status for pod "_()": Operation cannot be fulfilled on pods "postgresql-helper-1-deploy": StorageError: invalid object, Code: 4, Key: kubernetes.io/pods/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094, UID in object meta: I0125 05:12:28.384280 4678 audit.go:125] 2017-01-25T05:12:28.384242258-05:00 AUDIT: id="5bcc67e8-c282-4a82-9a31-60d71af2dfde" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:12:28.387189 4678 audit.go:45] 2017-01-25T05:12:28.387175427-05:00 AUDIT: id="5bcc67e8-c282-4a82-9a31-60d71af2dfde" response="200" I0125 05:12:28.387335 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (26.195886ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:28.387711 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 2) I0125 05:12:28.388483 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:12:28.388588 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d ready condition last transition time 2017-01-25 05:12:25 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:28.388574813 -0500 EST. 
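Aside: the 409 and the warning above ("Precondition failed: UID in precondition: b3e8deb7-..., UID in object meta:") are the API server's optimistic-concurrency check rejecting a status PUT whose target pod has already been deleted. In this trace the kubelet's status manager simply logs the warning and drops the stale update, which is the right call because the object is gone; the general-purpose client pattern for recoverable 409s is a bounded get-modify-update retry loop, sketched below without client-go. The fetch/update functions are stand-ins, not real API calls.

```go
// conflict_sketch.go — generic retry-on-conflict pattern for optimistic
// concurrency failures like the 409 in the log. Everything here is simulated.
package main

import (
	"errors"
	"fmt"
)

var errConflict = errors.New("conflict: resourceVersion or UID precondition failed")

type object struct {
	UID             string
	ResourceVersion string
	Status          string
}

// updateWithRetry refetches the latest copy and reapplies the mutation until
// the write succeeds or the attempt budget is exhausted.
func updateWithRetry(fetch func() (object, error), update func(object) error,
	mutate func(*object), attempts int) error {
	for i := 0; i < attempts; i++ {
		obj, err := fetch()
		if err != nil {
			return err
		}
		mutate(&obj)
		err = update(obj)
		if err == nil {
			return nil
		}
		if !errors.Is(err, errConflict) {
			return err // only conflicts are worth retrying
		}
		fmt.Printf("attempt %d: %v; retrying with a fresh copy\n", i+1, err)
	}
	return fmt.Errorf("gave up after %d conflicting attempts", attempts)
}

func main() {
	// Simulated server state: the first write races with another writer.
	stale := true
	server := object{UID: "b3e8deb7", ResourceVersion: "11071", Status: "Running"}

	fetch := func() (object, error) { return server, nil }
	update := func(o object) error {
		if stale {
			stale = false
			return errConflict
		}
		server = o
		return nil
	}

	err := updateWithRetry(fetch, update, func(o *object) { o.Status = "Succeeded" }, 3)
	fmt.Println("final:", server.Status, "err:", err)
}
```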
I0125 05:12:28.391229 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:28.393805 4678 docker_manager.go:1536] Killing container "c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy" with 0 second grace period I0125 05:12:28.417538 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:28.418903 4678 audit.go:125] 2017-01-25T05:12:28.418854088-05:00 AUDIT: id="e32e977c-4f4b-4b7d-a81e-12ef2123ea61" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:12:28.420416 4678 audit.go:45] 2017-01-25T05:12:28.4204016-05:00 AUDIT: id="e32e977c-4f4b-4b7d-a81e-12ef2123ea61" response="200" I0125 05:12:28.420630 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (2.067382ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.421053 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:12:28.421318 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:28.421466 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094"). 
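Aside: the secret.go / atomic_writer.go lines above are the kubelet refreshing the deployer-token secret volume; "no update required for target directory" means the on-disk payload already matches the fetched secret. Secret and configmap volumes are published with an all-or-nothing directory swap so a reader never sees a half-written set of files. Below is a generic sketch of that symlink-swap idea in plain Go; it is not the vendored AtomicWriter, and the directory naming, file modes, and payload are invented (the real writer also garbage-collects old payload directories and uses tighter permissions).

```go
// atomicdir_sketch.go — generic version of the refresh trick behind the
// "atomic_writer ... no update required" lines: new payload goes into a fresh
// timestamped directory, and a single rename() of the ..data symlink flips all
// files at once, so readers never observe a partially written secret.
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"time"
)

func publish(volumeDir string, payload map[string][]byte) error {
	tsDir := filepath.Join(volumeDir, "..payload_"+time.Now().Format("2006_01_02_15_04_05.000000000"))
	if err := os.MkdirAll(tsDir, 0o755); err != nil {
		return err
	}
	for name, data := range payload {
		if err := os.WriteFile(filepath.Join(tsDir, name), data, 0o644); err != nil {
			return err
		}
	}
	// Point a temporary symlink at the new directory, then atomically rename it
	// over ..data; rename(2) replaces the old link in a single step.
	tmpLink := filepath.Join(volumeDir, "..data_tmp")
	dataLink := filepath.Join(volumeDir, "..data")
	_ = os.Remove(tmpLink) // tolerate leftovers from an earlier attempt
	if err := os.Symlink(filepath.Base(tsDir), tmpLink); err != nil {
		return err
	}
	if err := os.Rename(tmpLink, dataLink); err != nil {
		return err
	}
	// User-visible names (token, ca.crt, ...) are stable relative symlinks into ..data.
	for name := range payload {
		userPath := filepath.Join(volumeDir, name)
		if _, err := os.Lstat(userPath); os.IsNotExist(err) {
			if err := os.Symlink(filepath.Join("..data", name), userPath); err != nil {
				return err
			}
		}
	}
	return nil
}

func main() {
	dir, err := os.MkdirTemp("", "secret-volume-")
	if err != nil {
		panic(err)
	}
	defer os.RemoveAll(dir)

	if err := publish(dir, map[string][]byte{"token": []byte("fake-token"), "ca.crt": []byte("fake-ca")}); err != nil {
		panic(err)
	}
	data, _ := os.ReadFile(filepath.Join(dir, "token"))
	fmt.Println("token contents:", string(data))
}
```

Because the visible file names are stable symlinks, a refresh only ever changes where ..data points, which is also why an unchanged secret results in the "no update required" short-circuit seen in the log.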
I0125 05:12:28.641384 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.642242 4678 audit.go:125] 2017-01-25T05:12:28.642161191-05:00 AUDIT: id="53013771-a7f3-473a-8b9c-e6910158f494" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:12:28.643627 4678 audit.go:45] 2017-01-25T05:12:28.643611388-05:00 AUDIT: id="53013771-a7f3-473a-8b9c-e6910158f494" response="200" I0125 05:12:28.643838 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (1.928576ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.644091 4678 docker_manager.go:1938] Found pod infra container for "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.644161 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.644221 4678 helpers.go:78] Already ran container "deployment" of pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)", do nothing I0125 05:12:28.644247 4678 docker_manager.go:2086] Got container changes for pod "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[]} I0125 05:12:28.644284 4678 docker_manager.go:2093] Killing Infra Container for "postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094)" because all other containers are dead. 
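The "Got container changes" dump above ends with the docker manager killing the pod's infra container because nothing is left to start or keep; the equivalent dump for router-2-tnqzg further down keeps its containers, so its infra container survives. A rough illustration of that branch, using a trimmed-down stand-in for the printed structure (illustrative only, not the kubelet's actual types):

package main

import "fmt"

// podContainerChanges is a trimmed-down, illustrative stand-in for the
// structure printed by docker_manager.go above; only the fields the
// decision needs are kept, and the value types are simplified.
type podContainerChanges struct {
    StartInfraContainer bool
    ContainersToStart   map[int]struct{}
    ContainersToKeep    map[string]int
}

// shouldKillInfra mirrors the behaviour visible in the log: with nothing to
// start and nothing to keep, the infra ("pause") container is torn down
// because all other containers are dead.
func shouldKillInfra(c podContainerChanges) bool {
    return !c.StartInfraContainer &&
        len(c.ContainersToStart) == 0 &&
        len(c.ContainersToKeep) == 0
}

func main() {
    deployerPod := podContainerChanges{} // postgresql-helper-1-deploy: all maps empty
    routerPod := podContainerChanges{    // router-2-tnqzg keeps its running containers
        ContainersToKeep: map[string]int{"188d4e0914dd28bdf7d2fcdd075d10d5db9c14ede72c6c0417fe6eaf68450985": -1},
    }
    fmt.Println(shouldKillInfra(deployerPod)) // true
    fmt.Println(shouldKillInfra(routerPod))   // false
}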
I0125 05:12:28.684715 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094) I0125 05:12:28.684776 4678 kubelet_pods.go:1029] Generating status for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.685005 4678 status_manager.go:312] Ignoring same status for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:09 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:29 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:09 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.18.7.222 StartTime:2017-01-25 03:41:09 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:router State:{Waiting: Running:0xc438430f80 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-haproxy-router:86a9783 ImageID:docker://sha256:0e944dc1f6ca904b8892fd8e5da5ec5cf13c0f673b44380cc81c1fdbc53b379e ContainerID:docker://38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018}]} I0125 05:12:28.685128 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.718367 4678 secret.go:179] Setting up volume server-certificate for pod 04c98b55-e2da-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/server-certificate I0125 05:12:28.718389 4678 secret.go:179] Setting up volume router-token-s79l8 for pod 04c98b55-e2da-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/router-token-s79l8 I0125 05:12:28.719189 4678 audit.go:125] 2017-01-25T05:12:28.719151536-05:00 AUDIT: id="7aa4e74f-032f-47cf-8030-9359f6e86013" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-certs" I0125 05:12:28.719189 4678 audit.go:125] 2017-01-25T05:12:28.719151591-05:00 AUDIT: id="49d85a7e-240b-411b-bdf4-264134b64558" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-token-s79l8" I0125 05:12:28.721090 4678 audit.go:45] 2017-01-25T05:12:28.721074996-05:00 AUDIT: id="49d85a7e-240b-411b-bdf4-264134b64558" response="200" I0125 05:12:28.721316 4678 audit.go:45] 2017-01-25T05:12:28.721305067-05:00 AUDIT: id="7aa4e74f-032f-47cf-8030-9359f6e86013" response="200" I0125 05:12:28.721494 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-token-s79l8: (2.571511ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.721556 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-certs: (2.685027ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.721795 4678 secret.go:206] Received secret default/router-certs containing (2) pieces of data, 6633 total bytes I0125 05:12:28.722176 4678 secret.go:206] Received secret default/router-token-s79l8 containing (4) pieces of data, 4105 total bytes I0125 05:12:28.722513 4678 
atomic_writer.go:142] pod default/router-2-tnqzg volume server-certificate: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/server-certificate I0125 05:12:28.722532 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094-server-certificate" (spec.Name: "server-certificate") pod "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094" (UID: "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:28.722631 4678 atomic_writer.go:142] pod default/router-2-tnqzg volume router-token-s79l8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/router-token-s79l8 I0125 05:12:28.722645 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094-router-token-s79l8" (spec.Name: "router-token-s79l8") pod "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094" (UID: "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:28.843340 4678 manager.go:898] Added container: "/system.slice/docker-0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f.scope" (aliases: [k8s_postgresql-master.e7ea033_postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8_b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094_88f8e310 0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f], namespace: "docker") I0125 05:12:28.843580 4678 handler.go:325] Added event &{/system.slice/docker-0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f.scope 2017-01-25 05:12:28.149921023 -0500 EST containerCreation {}} I0125 05:12:28.843639 4678 container.go:407] Start housekeeping for container "/system.slice/docker-0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f.scope" I0125 05:12:28.862481 4678 docker_manager.go:1577] Container "c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy" exited after 468.648735ms I0125 05:12:28.882388 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:28.884842 4678 docker_manager.go:1536] Killing container "c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy" with 10 second grace period I0125 05:12:28.886308 4678 docker_manager.go:1577] Container "c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy" exited after 1.438827ms W0125 05:12:28.886336 4678 docker_manager.go:1583] No ref for pod '"c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-deploy"' I0125 05:12:28.985422 4678 volume_manager.go:365] All volumes are attached and mounted for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.986334 4678 audit.go:125] 2017-01-25T05:12:28.98628542-05:00 AUDIT: id="f0341f61-6c01-410a-aa7f-67370636b9a4" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-dockercfg-g5x9s" I0125 05:12:28.987865 4678 audit.go:45] 2017-01-25T05:12:28.987847308-05:00 AUDIT: 
id="f0341f61-6c01-410a-aa7f-67370636b9a4" response="200" I0125 05:12:28.988111 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-dockercfg-g5x9s: (2.155224ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:28.988400 4678 docker_manager.go:1938] Found pod infra container for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.991043 4678 docker_manager.go:1951] Pod infra container looks good, keep it "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:28.991062 4678 docker_manager.go:1999] pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" container "router" exists as 38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018 I0125 05:12:28.991258 4678 docker_manager.go:2086] Got container changes for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:188d4e0914dd28bdf7d2fcdd075d10d5db9c14ede72c6c0417fe6eaf68450985 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[188d4e0914dd28bdf7d2fcdd075d10d5db9c14ede72c6c0417fe6eaf68450985:-1 38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018:0]} I0125 05:12:29.031315 4678 generic.go:145] GenericPLEG: b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f: non-existent -> running I0125 05:12:29.031377 4678 generic.go:145] GenericPLEG: b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48: non-existent -> running I0125 05:12:29.031390 4678 generic.go:145] GenericPLEG: b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b: running -> exited I0125 05:12:29.039084 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42d40e2c0 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/ce4eccb1 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc433d60240 NetworkSettings:0xc434263e00} I0125 05:12:29.043581 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42d40e6e0 Mounts:[] Config:0xc433d605a0 NetworkSettings:0xc434263f00} I0125 05:12:29.045866 4678 generic.go:342] PLEG: Write status for postgresql-helper-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-helper-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42ca797a0), (*container.ContainerStatus)(0xc42ca79960)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:29.045983 4678 kubelet.go:1820] SyncLoop (PLEG): ignore irrelevant event: &pleg.PodLifecycleEvent{ID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b"} I0125 05:12:29.057087 4678 docker_manager.go:389] 
Container inspect result: {ContainerJSONBase:0xc42d40eb00 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/containers/postgresql-master/88f8e310 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/tmp/openshift-extended-tests/persistent-volumes816894978/0000099920249 Destination:/var/lib/pgsql/data Driver: Mode: RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate}] Config:0xc433d60d80 NetworkSettings:0xc42d418600} I0125 05:12:29.061773 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42d40f080 Mounts:[] Config:0xc433d61c20 NetworkSettings:0xc42d418700} I0125 05:12:29.064071 4678 generic.go:342] PLEG: Write status for postgresql-master-1-6jfgj/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-1-6jfgj", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.7", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42ca79b20), (*container.ContainerStatus)(0xc42ca79ce0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:29.064243 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:29.064448 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:29.064518 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f"} I0125 05:12:29.065308 4678 audit.go:125] 2017-01-25T05:12:29.065246297-05:00 AUDIT: id="cdb586e3-f3a3-49db-9ad7-e5b395276acc" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:29.066899 4678 audit.go:45] 2017-01-25T05:12:29.066883413-05:00 AUDIT: id="cdb586e3-f3a3-49db-9ad7-e5b395276acc" response="200" I0125 05:12:29.067040 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (2.107202ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:29.068193 4678 audit.go:125] 2017-01-25T05:12:29.068156545-05:00 AUDIT: id="72f9ce07-e43a-4199-9ae9-c9e6111ab227" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/status" I0125 05:12:29.070937 4678 audit.go:45] 2017-01-25T05:12:29.070920173-05:00 AUDIT: 
id="72f9ce07-e43a-4199-9ae9-c9e6111ab227" response="200" I0125 05:12:29.071021 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/status: (3.12114ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:29.072472 4678 replication_controller.go:378] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11023 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11074 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:12:29.072684 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (39.631µs) I0125 05:12:29.072718 4678 replica_set.go:320] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11023 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11074 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:29.072826 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-6jfgj, ReplicaSet controller will avoid syncing I0125 05:12:29.072859 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-6jfgj, job controller will avoid syncing I0125 05:12:29.072883 4678 daemoncontroller.go:332] Pod postgresql-master-1-6jfgj updated. I0125 05:12:29.072912 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-6jfgj, daemon set controller will avoid syncing I0125 05:12:29.072937 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-6jfgj" I0125 05:12:29.072955 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-6jfgj, PodDisruptionBudget controller will avoid syncing. 
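The burst of "No ReplicaSets/jobs/daemon sets/PodDisruptionBudgets found for pod postgresql-master-1-6jfgj" messages reflects each controller checking whether anything it manages selects the pod's labels; only the replication controller for postgresql-master-1 does. A self-contained sketch of that label-selector subset check (not the actual controller code):

package main

import "fmt"

// selectorMatches reports whether every key/value pair of the selector is
// present in the pod's labels, the basic subset rule a controller uses to
// decide whether a pod belongs to it.
func selectorMatches(selector, podLabels map[string]string) bool {
    for k, v := range selector {
        if podLabels[k] != v {
            return false
        }
    }
    return true
}

func main() {
    // Labels taken from the postgresql-master-1-6jfgj dump above.
    podLabels := map[string]string{
        "app":              "pg-replica-example",
        "deployment":       "postgresql-master-1",
        "deploymentconfig": "postgresql-master",
        "name":             "postgresql-master",
    }
    // Selector an RC such as postgresql-master-1 would typically carry (assumed).
    fmt.Println(selectorMatches(map[string]string{"deployment": "postgresql-master-1"}, podLabels)) // true
    // A selector for an unrelated workload does not match, so that controller skips the pod.
    fmt.Println(selectorMatches(map[string]string{"name": "router"}, podLabels)) // false
}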
I0125 05:12:29.072960 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-6jfgj" I0125 05:12:29.073309 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-6jfgj, StatefulSet controller will avoid syncing I0125 05:12:29.073724 4678 status_manager.go:425] Status for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql-master]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.7 StartTime:0xc4283337a0 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running:0xc436ba61a0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f}]} version:2 podName:postgresql-master-1-6jfgj podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:29.074379 4678 config.go:281] Setting pods for source api I0125 05:12:29.075801 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:29.076240 4678 audit.go:125] 2017-01-25T05:12:29.076185389-05:00 AUDIT: id="c8d38e8a-e8cf-4ffd-a176-7e4aeb0436ca" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:29.078526 4678 audit.go:45] 2017-01-25T05:12:29.078510495-05:00 AUDIT: id="c8d38e8a-e8cf-4ffd-a176-7e4aeb0436ca" response="200" I0125 05:12:29.078600 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (4.569724ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:29.078927 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 1 I0125 05:12:29.081609 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc436830f20 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~empty-dir/postgresql-data Destination:/var/lib/pgsql/data Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: 
Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/containers/postgresql-slave/089e472a Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42f158ea0 NetworkSettings:0xc432644100} I0125 05:12:29.082003 4678 audit.go:125] 2017-01-25T05:12:29.081950096-05:00 AUDIT: id="29d4f483-a8fd-4774-b686-deb189e9c317" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:29.084505 4678 audit.go:45] 2017-01-25T05:12:29.084491665-05:00 AUDIT: id="29d4f483-a8fd-4774-b686-deb189e9c317" response="200" I0125 05:12:29.084571 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (5.209767ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:29.084828 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (11.803516ms) I0125 05:12:29.084966 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:12:29.085092 4678 proxier.go:804] Syncing iptables rules I0125 05:12:29.085104 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:29.099311 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:29.099424 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:29.099446 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:29.099456 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:29.099464 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:29.099480 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:29.099489 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:12:29.099501 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:12:29.116194 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:29.133163 4678 secret.go:179] Setting up volume default-token-0g2nw for pod b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:29.135349 4678 audit.go:125] 2017-01-25T05:12:29.135276097-05:00 AUDIT: id="ffb96b09-c23c-4ea2-9ad3-64589ef49afa" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim" I0125 05:12:29.136540 4678 audit.go:125] 2017-01-25T05:12:29.136503882-05:00 AUDIT: id="5203f4ed-1b85-4b83-a664-ff81dee677f1" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:12:29.140405 4678 audit.go:45] 2017-01-25T05:12:29.140387855-05:00 AUDIT: id="ffb96b09-c23c-4ea2-9ad3-64589ef49afa" response="200" I0125 05:12:29.140543 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim: (6.154119ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:29.140994 4678 audit.go:45] 2017-01-25T05:12:29.140981838-05:00 AUDIT: id="5203f4ed-1b85-4b83-a664-ff81dee677f1" response="200" I0125 05:12:29.141260 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (4.99846ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:29.141598 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:12:29.141881 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:29.142068 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094"). 
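Interleaved with the glog records are AUDIT lines made of flat key="value" pairs: id, ip, method, user, namespace and uri on the request, then id and response on completion, paired by the shared id. A small sketch for extracting those fields; the regexp is an assumption based on the format shown here:

package main

import (
    "fmt"
    "regexp"
)

// auditField matches one key="value" pair of an AUDIT record; the pattern is
// inferred from the lines in this log.
var auditField = regexp.MustCompile(`(\w+)="([^"]*)"`)

func main() {
    rec := `AUDIT: id="5203f4ed-1b85-4b83-a664-ff81dee677f1" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw"`
    fields := map[string]string{}
    for _, m := range auditField.FindAllStringSubmatch(rec, -1) {
        fields[m[1]] = m[2]
    }
    // Request and completion records can then be paired on fields["id"].
    fmt.Println(fields["id"], fields["method"], fields["uri"])
}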
I0125 05:12:29.142733 4678 audit.go:125] 2017-01-25T05:12:29.142702903-05:00 AUDIT: id="e3cbe354-6965-4c39-9843-8be2f161dd82" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:29.144032 4678 audit.go:45] 2017-01-25T05:12:29.144019998-05:00 AUDIT: id="e3cbe354-6965-4c39-9843-8be2f161dd82" response="200" I0125 05:12:29.144127 4678 panics.go:76] GET /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (1.65959ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:29.155361 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:29.171315 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc435afb1e0 Mounts:[] Config:0xc431158480 NetworkSettings:0xc433740400} I0125 05:12:29.176897 4678 audit.go:125] 2017-01-25T05:12:29.176859265-05:00 AUDIT: id="4bd45490-0a09-4cd1-a7a5-36c47517c883" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:12:29.178214 4678 audit.go:45] 2017-01-25T05:12:29.17818181-05:00 AUDIT: id="4bd45490-0a09-4cd1-a7a5-36c47517c883" response="200" I0125 05:12:29.178318 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (4.731905ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:29.178680 4678 controller.go:106] Found 0 cronjobs I0125 05:12:29.181275 4678 audit.go:125] 2017-01-25T05:12:29.181245981-05:00 AUDIT: id="81d8e472-d6a8-42a8-a429-8865fb208948" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:12:29.182415 4678 audit.go:45] 2017-01-25T05:12:29.182398322-05:00 AUDIT: id="81d8e472-d6a8-42a8-a429-8865fb208948" response="200" I0125 05:12:29.182498 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (3.565088ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:29.182819 4678 controller.go:114] Found 0 jobs I0125 05:12:29.182829 4678 controller.go:117] Found 0 groups I0125 05:12:29.185582 4678 generic.go:342] PLEG: Write status for postgresql-slave-1-qt1rc/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-slave-1-qt1rc", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.6", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42afe3340), (*container.ContainerStatus)(0xc42ac90fc0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:29.185667 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48"} I0125 05:12:29.185724 4678 kubelet_pods.go:1029] Generating status for "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:29.186066 4678 
docker_manager.go:1536] Killing container "1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48 postgresql-slave extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc" with 30 second grace period I0125 05:12:29.186664 4678 audit.go:125] 2017-01-25T05:12:29.186629134-05:00 AUDIT: id="448ed6a1-cf14-4492-a4c9-9c20d2152862" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:29.186963 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:29.204744 4678 audit.go:45] 2017-01-25T05:12:29.204720597-05:00 AUDIT: id="448ed6a1-cf14-4492-a4c9-9c20d2152862" response="200" I0125 05:12:29.204978 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (18.608953ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:29.206534 4678 audit.go:125] 2017-01-25T05:12:29.206491746-05:00 AUDIT: id="6098ba0a-6b57-47cb-898b-2987654c40c1" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/status" I0125 05:12:29.209574 4678 audit.go:45] 2017-01-25T05:12:29.209551824-05:00 AUDIT: id="6098ba0a-6b57-47cb-898b-2987654c40c1" response="200" I0125 05:12:29.209721 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/status: (3.515328ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:29.210108 4678 status_manager.go:425] Status for pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935920 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935920 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql-slave]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935920 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.6 StartTime:0xc425ae2000 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-slave State:{Waiting: Running:0xc42771f1c0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48}]} version:2 podName:postgresql-slave-1-qt1rc podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:29.210181 4678 status_manager.go:435] Pod 
"postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" is terminated, but some containers are still running I0125 05:12:29.212214 4678 replication_controller.go:378] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11008 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc42d07d6d8 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11076 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc427f037e8 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:29.212308 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:12:31.005970671 -0500 EST, labels map[app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave]. 
I0125 05:12:29.212460 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc in state Running, deletion time 2017-01-25 05:12:31.005970671 -0500 EST I0125 05:12:29.212487 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (72.976µs) I0125 05:12:29.212527 4678 replica_set.go:320] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11008 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc42d07d6d8 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11076 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc427f037e8 Labels:map[name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave] Annotations:map[openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1] OwnerReferences:[] Finalizers:[] ClusterName:}. 
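The "Ignoring inactive pod ... in state Running, deletion time ..." record above captures the rule the replication controller applies: once a pod carries a deletion timestamp it no longer counts toward the replica total, even while its container is still running. A minimal illustration with a stripped-down pod type (not the real API object):

package main

import (
    "fmt"
    "time"
)

// pod is a deliberately minimal stand-in for the real API object; only the
// fields needed for the check are kept.
type pod struct {
    Name              string
    Phase             string
    DeletionTimestamp *time.Time
}

// isActive mirrors the rule visible in the log: a pod already marked for
// deletion (or in a terminal phase) is ignored when counting replicas.
func isActive(p pod) bool {
    return p.DeletionTimestamp == nil && p.Phase != "Succeeded" && p.Phase != "Failed"
}

func main() {
    // Deletion time taken from the record above.
    del := time.Date(2017, time.January, 25, 5, 12, 31, 5970671, time.FixedZone("EST", -5*3600))
    p := pod{Name: "postgresql-slave-1-qt1rc", Phase: "Running", DeletionTimestamp: &del}
    fmt.Println(isActive(p)) // false: still Running, but the deletion timestamp excludes it
}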
I0125 05:12:29.212588 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:12:31.005970671 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-slave-1-qt1rc", GenerateName:"postgresql-slave-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11076", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935920, nsec:921802332, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc437fa3ae0), DeletionGracePeriodSeconds:(*int64)(0xc427f037e8), Labels:map[string]string{"deployment":"postgresql-slave-1", "deploymentconfig":"postgresql-slave", "name":"postgresql-slave", "app":"pg-replica-example"}, Annotations:map[string]string{"openshift.io/deployment-config.name":"postgresql-slave", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-slave-1\",\"uid\":\"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"10998\"}}\n", "openshift.io/deployment.name":"postgresql-slave-1", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc427f03880), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc437fbbaa0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), 
RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-slave", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-slave"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_SERVICE_NAME", Value:"postgresql-master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc437fbbb30), ReadinessProbe:(*api.Probe)(0xc437fbbb60), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc437fbbb90), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc427f039a0), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc433588300), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql-slave]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, 
LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.6", StartTime:(*unversioned.Time)(0xc437fa3e20), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-slave", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(0xc437fa3e40), Terminated:(*api.ContainerStateTerminated)(nil)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48"}}}}. I0125 05:12:29.212935 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-qt1rc, ReplicaSet controller will avoid syncing I0125 05:12:29.212977 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-qt1rc, job controller will avoid syncing I0125 05:12:29.213000 4678 daemoncontroller.go:332] Pod postgresql-slave-1-qt1rc updated. I0125 05:12:29.213041 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-qt1rc, daemon set controller will avoid syncing I0125 05:12:29.213070 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-qt1rc" I0125 05:12:29.213086 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-qt1rc, PodDisruptionBudget controller will avoid syncing. 
I0125 05:12:29.213091 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-qt1rc" I0125 05:12:29.213530 4678 config.go:281] Setting pods for source api I0125 05:12:29.214939 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:29.215218 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-qt1rc, StatefulSet controller will avoid syncing I0125 05:12:29.217746 4678 audit.go:125] 2017-01-25T05:12:29.217699787-05:00 AUDIT: id="5d51da7e-8152-4da6-a67d-078d0a9fc1a8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:29.218804 4678 audit.go:45] 2017-01-25T05:12:29.218788979-05:00 AUDIT: id="5d51da7e-8152-4da6-a67d-078d0a9fc1a8" response="200" I0125 05:12:29.218881 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (3.338922ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:29.219174 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:12:29.220144 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:29.235344 4678 audit.go:125] 2017-01-25T05:12:29.235256484-05:00 AUDIT: id="a0f57269-c348-4982-856e-683d84aac3e8" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:29.236508 4678 audit.go:45] 2017-01-25T05:12:29.236491485-05:00 AUDIT: id="a0f57269-c348-4982-856e-683d84aac3e8" response="200" I0125 05:12:29.236618 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (17.025072ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:29.237246 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(24.092747ms) I0125 05:12:29.255275 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:29.271394 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:29.271465 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:29.272168 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:29.272184 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:29.273504 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc435bca060 -1 [] true false map[] 0xc42f4cad20 } I0125 05:12:29.273567 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:29.273702 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc435bca140 -1 [] true false map[] 0xc42f4cab40 } I0125 05:12:29.273751 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:29.293407 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:29.309941 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:35727" with type 1 I0125 05:12:29.310034 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:29.310141 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:29.329358 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:29.348510 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:50245" with type 28 I0125 05:12:29.348629 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:29.348716 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:29.348900 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:50245" with type 1 I0125 05:12:29.348928 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:29.348950 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:29.368469 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:29.371918 4678 audit.go:125] 2017-01-25T05:12:29.371854748-05:00 AUDIT: id="0da7190d-5f78-47cf-b1fb-d6daaa27d98d" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:12:29.373865 4678 audit.go:45] 2017-01-25T05:12:29.373832173-05:00 AUDIT: id="0da7190d-5f78-47cf-b1fb-d6daaa27d98d" response="200" I0125 05:12:29.374161 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (2.611579ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:29.374579 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:29.374657 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:29.374676 4678 docker_manager.go:1999] pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql-master" exists as 0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f I0125 05:12:29.374807 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741:-1 0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f:0]} I0125 05:12:29.380127 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:29.399298 4678 audit.go:125] 2017-01-25T05:12:29.399232202-05:00 AUDIT: id="3173ea1c-1111-4cfe-b00c-04af8cabd18d" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-1" I0125 05:12:29.401665 4678 audit.go:45] 2017-01-25T05:12:29.401644422-05:00 AUDIT: id="3173ea1c-1111-4cfe-b00c-04af8cabd18d" response="200" I0125 05:12:29.402148 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-1: (5.495281ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:29.411033 4678 proxier.go:1310] Restoring 
iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment 
default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:29.411070 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:29.443108 4678 proxier.go:797] syncProxyRules took 358.009308ms I0125 05:12:29.443145 4678 proxier.go:566] OnEndpointsUpdate took 358.116856ms for 6 endpoints I0125 05:12:29.443254 4678 proxier.go:381] Received update notice: [] I0125 05:12:29.443305 4678 proxier.go:804] Syncing iptables rules I0125 05:12:29.443316 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:29.473257 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:29.508186 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:29.544390 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:29.587810 4678 
iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:29.620377 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:29.653185 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:29.686867 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:29.687182 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:29.715960 4678 kubelet_volumes.go:104] Orphaned pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" found, but volumes are not cleaned up I0125 05:12:29.725326 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:29.757724 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment 
default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:29.757761 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 
05:12:29.792715 4678 proxier.go:797] syncProxyRules took 349.402395ms I0125 05:12:29.792754 4678 proxier.go:431] OnServiceUpdate took 349.479867ms for 4 services I0125 05:12:30.174085 4678 panics.go:76] GET /api/v1/watch/endpoints?resourceVersion=10136&timeoutSeconds=480: (8m0.001157896s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:30.175320 4678 reflector.go:392] github.com/openshift/origin/pkg/cmd/server/kubernetes/node.go:272: Watch close - *api.Endpoints total 45 items received I0125 05:12:30.176144 4678 audit.go:125] 2017-01-25T05:12:30.17610031-05:00 AUDIT: id="6b46e381-e276-4fcb-8f70-39da659ae1c4" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/watch/endpoints?resourceVersion=11075&timeoutSeconds=442" I0125 05:12:30.176824 4678 audit.go:45] 2017-01-25T05:12:30.176808643-05:00 AUDIT: id="6b46e381-e276-4fcb-8f70-39da659ae1c4" response="200" I0125 05:12:30.193906 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:30.194133 4678 status_manager.go:312] Ignoring same status for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason: Message:} {Type:Ready Status:False LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason:ContainersNotReady Message:containers with unready status: [postgresql-master]} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.7 StartTime:2017-01-25 05:12:02 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running:0xc4349578c0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f}]} I0125 05:12:30.194295 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:30.278012 4678 secret.go:179] Setting up volume default-token-0g2nw for pod b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:30.279503 4678 audit.go:125] 2017-01-25T05:12:30.279461483-05:00 AUDIT: id="83db7b2d-0d5c-47bf-ba6d-b66651502290" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:12:30.280148 4678 audit.go:125] 2017-01-25T05:12:30.280125304-05:00 AUDIT: id="d856444c-2b0e-4302-a5ac-5abf4e5ca9c7" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" 
namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim" I0125 05:12:30.281219 4678 audit.go:45] 2017-01-25T05:12:30.281189573-05:00 AUDIT: id="d856444c-2b0e-4302-a5ac-5abf4e5ca9c7" response="200" I0125 05:12:30.281219 4678 audit.go:45] 2017-01-25T05:12:30.281189578-05:00 AUDIT: id="83db7b2d-0d5c-47bf-ba6d-b66651502290" response="200" I0125 05:12:30.281322 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim: (1.347626ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:30.281518 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (2.249157ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:30.281652 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:12:30.281891 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:12:30.282040 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:30.282107 4678 audit.go:125] 2017-01-25T05:12:30.282077631-05:00 AUDIT: id="68b0443a-392f-4176-8553-21398827989a" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:30.284061 4678 audit.go:45] 2017-01-25T05:12:30.284045806-05:00 AUDIT: id="68b0443a-392f-4176-8553-21398827989a" response="200" I0125 05:12:30.284161 4678 panics.go:76] GET /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (2.323569ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:30.367351 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:44723" with type 1 I0125 05:12:30.367401 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:30.367434 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:30.380841 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:42681" with type 28 I0125 05:12:30.380912 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:30.380953 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:30.381074 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:42681" with type 1 I0125 05:12:30.381107 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:30.381127 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:30.407059 4678 audit.go:125] 2017-01-25T05:12:30.407006749-05:00 AUDIT: id="fcf6858f-0114-4603-b425-f14103551e6d" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:12:30.408996 4678 audit.go:45] 2017-01-25T05:12:30.408977642-05:00 AUDIT: id="fcf6858f-0114-4603-b425-f14103551e6d" response="200" I0125 05:12:30.409505 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (4.516394ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:30.494607 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:30.495438 4678 audit.go:125] 2017-01-25T05:12:30.49538824-05:00 AUDIT: id="b19a58f3-6a41-4af6-b95f-3874d52cf07b" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:12:30.496896 4678 audit.go:45] 2017-01-25T05:12:30.496878666-05:00 AUDIT: id="b19a58f3-6a41-4af6-b95f-3874d52cf07b" response="200" I0125 05:12:30.497120 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (2.047087ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:30.497427 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:30.497516 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:30.497537 4678 docker_manager.go:1999] pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql-master" exists as 0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f I0125 05:12:30.497667 4678 docker_manager.go:2086] Got container changes for pod 
"postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f:0 bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741:-1]} I0125 05:12:30.579686 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/secret/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") from pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:30.579812 4678 util.go:340] Tearing down volume deployer-token-r7jj8 for pod b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:30.580111 4678 mount_linux.go:147] Unmounting /mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:30.613888 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (OuterVolumeSpecName: "deployer-token-r7jj8") pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "deployer-token-r7jj8". PluginName "kubernetes.io/secret", VolumeGidValue "" I0125 05:12:30.932495 4678 status_manager.go:190] Container readiness unchanged (false): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" - "docker://1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48" I0125 05:12:31.392716 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:49145" with type 1 I0125 05:12:31.392780 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:31.392816 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:31.400933 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:53889" with type 28 I0125 05:12:31.400976 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:31.401012 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:31.401041 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:53889" with type 1 I0125 05:12:31.401072 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:31.401103 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:31.413134 4678 audit.go:125] 2017-01-25T05:12:31.413081751-05:00 AUDIT: id="071e0707-6c84-439a-97a7-dab0f17fde76" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-helper-1" I0125 05:12:31.414703 4678 audit.go:45] 2017-01-25T05:12:31.414691647-05:00 AUDIT: id="071e0707-6c84-439a-97a7-dab0f17fde76" response="200" I0125 05:12:31.414949 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-helper-1: (3.610937ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:31.684603 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:31.689291 4678 kubelet_volumes.go:113] Orphaned pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" found, removing I0125 05:12:31.922727 4678 audit.go:125] 2017-01-25T05:12:31.922693202-05:00 AUDIT: id="bec25913-822a-46e9-9930-043de10425be" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:12:31.923864 4678 audit.go:45] 2017-01-25T05:12:31.923853101-05:00 AUDIT: id="bec25913-822a-46e9-9930-043de10425be" response="200" I0125 05:12:31.923937 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.45225ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:32.110243 4678 status_manager.go:190] Container readiness unchanged (false): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" - "docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f" I0125 05:12:32.410844 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:46228" with type 1 I0125 05:12:32.410915 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:32.410950 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:32.418732 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:60328" with type 28 I0125 05:12:32.418782 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:32.418815 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:32.418846 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:60328" with type 1 I0125 05:12:32.418878 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:32.418906 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:32.905374 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:12:32.959991 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:12:32.960019 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:12:32.967204 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:32.967227 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:32.967945 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:32.967963 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:32.968698 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:12:32 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc430876700 0 [] true false map[] 0xc42e52d3b0 } I0125 05:12:32.968747 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:32.968916 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:12:32 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc432719560 0 [] true false map[] 0xc42c0ae780 } I0125 05:12:32.968952 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:33.336027 4678 audit.go:125] 2017-01-25T05:12:33.33599306-05:00 AUDIT: id="045b50b9-a8fe-45e3-bb62-1f25d8aee3d8" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:33.336481 4678 audit.go:45] 2017-01-25T05:12:33.336468074-05:00 AUDIT: id="045b50b9-a8fe-45e3-bb62-1f25d8aee3d8" response="200" I0125 05:12:33.336827 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.064179ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:33.419223 4678 audit.go:125] 2017-01-25T05:12:33.419169606-05:00 AUDIT: id="ed65b8c1-1bdc-4675-ba76-4865dbd3a613" ip="172.18.7.222" method="GET" 
user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:33.420386 4678 audit.go:45] 2017-01-25T05:12:33.420371426-05:00 AUDIT: id="ed65b8c1-1bdc-4675-ba76-4865dbd3a613" response="200" I0125 05:12:33.420588 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (3.123669ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:33.432415 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:46808" with type 1 I0125 05:12:33.432477 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:33.432511 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:33.444068 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:50214" with type 28 I0125 05:12:33.444128 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:33.444163 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:33.444314 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:50214" with type 1 I0125 05:12:33.444347 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:33.444370 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:33.571565 4678 reflector.go:273] pkg/controller/endpoint/endpoints_controller.go:160: forcing resync I0125 05:12:33.571929 4678 endpoints_controller.go:334] Finished syncing service "default/kubernetes" endpoints. 
(1.96µs) I0125 05:12:33.581148 4678 audit.go:125] 2017-01-25T05:12:33.581102666-05:00 AUDIT: id="5fd88aca-9331-46dd-b2f4-7deeb1d90e17" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/router" I0125 05:12:33.581674 4678 audit.go:125] 2017-01-25T05:12:33.581647077-05:00 AUDIT: id="a7da2b60-6b3c-4235-bf77-0c9e270bf25f" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/docker-registry" I0125 05:12:33.581990 4678 audit.go:125] 2017-01-25T05:12:33.581953797-05:00 AUDIT: id="96c6b9dc-b671-401c-a207-61b7e4bbfbe6" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:33.583267 4678 audit.go:45] 2017-01-25T05:12:33.583252816-05:00 AUDIT: id="a7da2b60-6b3c-4235-bf77-0c9e270bf25f" response="200" I0125 05:12:33.583360 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/docker-registry: (10.101224ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:33.583561 4678 audit.go:45] 2017-01-25T05:12:33.583545677-05:00 AUDIT: id="5fd88aca-9331-46dd-b2f4-7deeb1d90e17" response="200" I0125 05:12:33.583607 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/router: (9.879878ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:33.583979 4678 endpoints_controller.go:334] Finished syncing service "default/router" endpoints. (12.025126ms) I0125 05:12:33.584068 4678 endpoints_controller.go:334] Finished syncing service "default/docker-registry" endpoints. 
(12.258987ms) I0125 05:12:33.584858 4678 audit.go:125] 2017-01-25T05:12:33.584823599-05:00 AUDIT: id="4b58d3a6-6c64-4b47-b382-5b9b538414d3" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:12:33.585160 4678 audit.go:125] 2017-01-25T05:12:33.585128989-05:00 AUDIT: id="1d81a06b-08af-48b4-955a-4069d74d42d2" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:33.586839 4678 audit.go:45] 2017-01-25T05:12:33.586824736-05:00 AUDIT: id="4b58d3a6-6c64-4b47-b382-5b9b538414d3" response="200" I0125 05:12:33.586895 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (14.280233ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:33.587065 4678 audit.go:45] 2017-01-25T05:12:33.587053295-05:00 AUDIT: id="96c6b9dc-b671-401c-a207-61b7e4bbfbe6" response="200" I0125 05:12:33.587108 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (12.911151ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:33.587275 4678 audit.go:45] 2017-01-25T05:12:33.587264042-05:00 AUDIT: id="1d81a06b-08af-48b4-955a-4069d74d42d2" response="200" I0125 05:12:33.587317 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (12.61426ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:33.587654 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:12:33.587922 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (16.227169ms) I0125 05:12:33.588005 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. 
(15.948844ms) I0125 05:12:33.590227 4678 audit.go:125] 2017-01-25T05:12:33.590174946-05:00 AUDIT: id="b08827c3-8108-414d-923c-288518703a5f" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:33.590800 4678 audit.go:45] 2017-01-25T05:12:33.590787184-05:00 AUDIT: id="b08827c3-8108-414d-923c-288518703a5f" response="200" I0125 05:12:33.590851 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (2.615719ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:33.591085 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. (18.914004ms) I0125 05:12:33.636956 4678 audit.go:125] 2017-01-25T05:12:33.636918663-05:00 AUDIT: id="aba17bc3-cc54-42ae-94a4-7737873abcc0" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:33.638003 4678 audit.go:45] 2017-01-25T05:12:33.637993267-05:00 AUDIT: id="aba17bc3-cc54-42ae-94a4-7737873abcc0" response="200" I0125 05:12:33.638222 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (2.784431ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40550] I0125 05:12:33.640069 4678 reflector.go:273] pkg/controller/podautoscaler/horizontal.go:133: forcing resync I0125 05:12:33.647429 4678 audit.go:125] 2017-01-25T05:12:33.647386474-05:00 AUDIT: id="7fdc53f2-3a2c-4aec-9f30-c78e6fb2d467" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql-master&container=postgresql-master&stderr=true&stdout=true" I0125 05:12:33.648293 4678 audit.go:125] 2017-01-25T05:12:33.648270722-05:00 AUDIT: id="54a5b6b3-1993-4fd9-aec2-ac3567bb9893" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:33.649176 4678 audit.go:45] 2017-01-25T05:12:33.649165469-05:00 AUDIT: id="54a5b6b3-1993-4fd9-aec2-ac3567bb9893" response="200" I0125 05:12:33.649283 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (1.200165ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:33.649573 4678 admission.go:77] getting security context constraints for pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info 
&{extended-test-postgresql-replication-1-34bbd-xd4g8-user b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:33.649610 4678 admission.go:88] getting security context constraints for pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:33.650073 4678 audit.go:125] 2017-01-25T05:12:33.650051305-05:00 AUDIT: id="207378fc-5e4c-4941-8711-c234bf685b30" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:33.650891 4678 audit.go:45] 2017-01-25T05:12:33.650878908-05:00 AUDIT: id="207378fc-5e4c-4941-8711-c234bf685b30" response="200" I0125 05:12:33.650937 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.056232ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:33.651227 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:33.651239 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:33.651243 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:33.651258 4678 admission.go:149] validating pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) against providers restricted I0125 05:12:33.651328 4678 admission.go:116] pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) validated against provider restricted I0125 05:12:33.662670 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/postgresql-master"} I0125 05:12:33.663382 4678 audit.go:125] 2017-01-25T05:12:33.663356325-05:00 AUDIT: id="1934fb03-4bbb-4f4e-978c-f7accce50d2f" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/oapi/v1/subjectaccessreviews" I0125 05:12:33.663959 4678 audit.go:45] 2017-01-25T05:12:33.663948299-05:00 AUDIT: id="1934fb03-4bbb-4f4e-978c-f7accce50d2f" response="201" I0125 05:12:33.664016 4678 panics.go:76] POST /oapi/v1/subjectaccessreviews: (848.362µs) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50864] I0125 05:12:33.664326 4678 authorizer.go:69] allowed=true, reason=allowed by cluster rule I0125 05:12:33.685371 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:33.729485 4678 server.go:744] POST 
/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/postgresql-master?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&error=1&output=1: (67.012001ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41672] I0125 05:12:33.731956 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql-master&container=postgresql-master&stderr=true&stdout=true: (86.129363ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40552] I0125 05:12:33.755937 4678 reflector.go:273] pkg/controller/disruption/disruption.go:284: forcing resync I0125 05:12:33.755969 4678 reflector.go:273] pkg/controller/disruption/disruption.go:281: forcing resync I0125 05:12:33.755955 4678 reflector.go:273] pkg/controller/disruption/disruption.go:283: forcing resync I0125 05:12:33.781210 4678 reflector.go:273] github.com/openshift/origin/pkg/project/controller/factory.go:36: forcing resync I0125 05:12:33.785295 4678 reflector.go:273] pkg/controller/disruption/disruption.go:285: forcing resync I0125 05:12:33.825490 4678 reflector.go:273] pkg/controller/petset/pet_set.go:148: forcing resync I0125 05:12:33.986531 4678 reflector.go:273] pkg/controller/informers/factory.go:89: forcing resync I0125 05:12:34.007450 4678 gc_controller.go:175] GC'ing orphaned I0125 05:12:34.007475 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. I0125 05:12:34.131514 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:12:34.131586 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:12:34.131606 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:34.131625 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:34.131633 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:12:34.131640 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:34.131672 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:34.131691 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:12:34.131740 4678 pv_controller_base.go:607] storeObjectUpdate updating volume 
"/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:12:34.131759 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:34.131768 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:12:34.131766 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:34.131776 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:12:34.131782 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:12:34.131782 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:34.131796 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:34.131808 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:12:34.131813 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:12:34.131820 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:12:34.131828 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:34.131837 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:12:34.131866 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:12:34.131871 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:34.131883 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:34.131890 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: 
"pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:34.131942 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:12:34.456295 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:33047" with type 1 I0125 05:12:34.456341 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:34.456370 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:34.464451 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:51993" with type 28 I0125 05:12:34.464495 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:34.464533 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:34.464588 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:51993" with type 1 I0125 05:12:34.464621 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:34.464648 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:35.221652 4678 audit.go:125] 2017-01-25T05:12:35.221620039-05:00 AUDIT: id="e116fab7-7278-4623-8c03-3e466ecef72a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:12:35.222651 4678 audit.go:45] 2017-01-25T05:12:35.222640429-05:00 AUDIT: id="e116fab7-7278-4623-8c03-3e466ecef72a" response="200" I0125 05:12:35.222724 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.323556ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:35.474416 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:44141" with type 1 I0125 05:12:35.474463 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:35.474498 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:35.482439 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:56678" with type 1 I0125 05:12:35.482497 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:35.482527 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:35.482608 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:56678" with type 28 I0125 05:12:35.482640 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:35.482657 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:35.684615 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:35.735287 4678 audit.go:125] 2017-01-25T05:12:35.735238872-05:00 AUDIT: id="2e4d8c2d-487a-4d9e-bc13-77b4b4e1deff" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:12:35.736239 4678 audit.go:45] 2017-01-25T05:12:35.736224233-05:00 AUDIT: id="2e4d8c2d-487a-4d9e-bc13-77b4b4e1deff" response="200" I0125 05:12:35.736451 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (2.650614ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:35.927767 4678 audit.go:125] 2017-01-25T05:12:35.927725471-05:00 AUDIT: id="a940c925-c4da-442e-85a9-b48f84c8c5e3" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:12:35.928769 4678 audit.go:45] 2017-01-25T05:12:35.928753584-05:00 AUDIT: id="a940c925-c4da-442e-85a9-b48f84c8c5e3" response="200" I0125 05:12:35.928983 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (2.641778ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40558] I0125 05:12:35.937847 4678 audit.go:125] 2017-01-25T05:12:35.937801716-05:00 AUDIT: id="aec0af8c-4754-405a-a193-ee99a8f6a808" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql&container=postgresql&stderr=true&stdout=true" I0125 05:12:35.938696 4678 audit.go:125] 2017-01-25T05:12:35.938673449-05:00 AUDIT: id="e0b75728-1ba7-4394-afea-0caf1f4e9319" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:12:35.939580 4678 audit.go:45] 
2017-01-25T05:12:35.939568206-05:00 AUDIT: id="e0b75728-1ba7-4394-afea-0caf1f4e9319" response="200" I0125 05:12:35.939682 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (1.184534ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:35.939979 4678 admission.go:77] getting security context constraints for pod postgresql-helper-1-cpv6d (generate: postgresql-helper-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{extended-test-postgresql-replication-1-34bbd-xd4g8-user b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:35.940007 4678 admission.go:88] getting security context constraints for pod postgresql-helper-1-cpv6d (generate: postgresql-helper-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:35.940507 4678 audit.go:125] 2017-01-25T05:12:35.94048423-05:00 AUDIT: id="a0b772cf-eccc-43c5-a2b4-0cff7c28da6d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:35.941365 4678 audit.go:45] 2017-01-25T05:12:35.941355568-05:00 AUDIT: id="a0b772cf-eccc-43c5-a2b4-0cff7c28da6d" response="200" I0125 05:12:35.941409 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.10556ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:35.941638 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:35.941646 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:35.941651 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:35.941680 4678 admission.go:149] validating pod postgresql-helper-1-cpv6d (generate: postgresql-helper-1-) against providers restricted I0125 05:12:35.941735 4678 admission.go:116] pod postgresql-helper-1-cpv6d (generate: postgresql-helper-1-) validated against provider restricted I0125 05:12:35.953555 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d/postgresql"} I0125 05:12:35.954218 4678 audit.go:125] 2017-01-25T05:12:35.95418265-05:00 AUDIT: id="7a28f5b7-86e6-43fa-89ed-bf4ffa295a49" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/oapi/v1/subjectaccessreviews" I0125 05:12:35.954781 4678 audit.go:45] 2017-01-25T05:12:35.954767251-05:00 AUDIT: 
id="7a28f5b7-86e6-43fa-89ed-bf4ffa295a49" response="201" I0125 05:12:35.954835 4678 panics.go:76] POST /oapi/v1/subjectaccessreviews: (824.343µs) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50864] I0125 05:12:35.955087 4678 authorizer.go:69] allowed=true, reason=allowed by cluster rule I0125 05:12:36.016589 4678 server.go:744] POST /exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d/postgresql?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&error=1&output=1: (63.216222ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41680] E0125 05:12:36.016998 4678 proxy.go:193] Error proxying data from client to backend: write tcp 172.18.7.222:41680->172.18.7.222:10250: write: broken pipe I0125 05:12:36.017093 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql&container=postgresql&stderr=true&stdout=true: (80.646363ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40560] I0125 05:12:36.343134 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:12:36.343154 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:12:36.449005 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:12:36.492892 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:39276" with type 1 I0125 05:12:36.492940 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:36.492972 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:36.500946 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:47908" with type 28 I0125 05:12:36.500992 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:36.501019 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:36.501079 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:47908" with type 1 I0125 05:12:36.501110 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:36.501138 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:36.796988 4678 worker.go:45] 0 Health Check Listeners I0125 05:12:36.797017 4678 worker.go:46] 3 Services registered for health checking I0125 05:12:36.797024 4678 worker.go:50] Service default/docker-registry has 1 local endpoints I0125 05:12:36.797029 4678 worker.go:50] Service default/router has 1 local endpoints I0125 05:12:36.797033 4678 worker.go:50] Service extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper has 1 local endpoints I0125 05:12:37.072671 4678 proxier.go:804] Syncing iptables rules I0125 05:12:37.072707 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:37.091913 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:37.111309 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:37.130606 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:37.150054 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:37.169385 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:37.188715 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:37.207874 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:37.227069 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:37.250151 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP 
--set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j 
KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:37.250187 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:37.271812 4678 proxier.go:797] syncProxyRules took 199.137859ms I0125 05:12:37.271857 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-CONTAINER -t nat] I0125 05:12:37.289724 4678 audit.go:125] 2017-01-25T05:12:37.289669259-05:00 AUDIT: id="3f8e981c-feaa-4505-953b-eac67da19580" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:12:37.290308 4678 audit.go:45] 2017-01-25T05:12:37.29029481-05:00 AUDIT: id="3f8e981c-feaa-4505-953b-eac67da19580" response="200" I0125 05:12:37.290712 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.329395ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:37.291792 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-CONTAINER] I0125 05:12:37.325747 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-HOST -t nat] I0125 05:12:37.354310 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-HOST] I0125 05:12:37.386485 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-CONTAINER -t nat] I0125 05:12:37.400134 4678 audit.go:125] 2017-01-25T05:12:37.40008971-05:00 AUDIT: id="fe2dece3-4497-4025-a8fb-7a42c6ef04d7" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:12:37.404275 4678 audit.go:45] 2017-01-25T05:12:37.404252444-05:00 AUDIT: id="fe2dece3-4497-4025-a8fb-7a42c6ef04d7" response="200" I0125 05:12:37.405285 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (5.55263ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:37.406292 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:12:37.409929 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-CONTAINER] I0125 05:12:37.429549 4678 iptables.go:362] running iptables -N 
[KUBE-NODEPORT-HOST -t nat] I0125 05:12:37.448922 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-HOST] I0125 05:12:37.468044 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-NON-LOCAL -t filter] I0125 05:12:37.486730 4678 iptables.go:362] running iptables -C [INPUT -t filter -m comment --comment Ensure that non-local NodePort traffic can flow -j KUBE-NODEPORT-NON-LOCAL] I0125 05:12:37.512199 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:50634" with type 1 I0125 05:12:37.512271 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:37.512319 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:37.520437 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:38593" with type 28 I0125 05:12:37.520489 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:37.520523 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:37.520574 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:38593" with type 1 I0125 05:12:37.520619 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:37.520647 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:37.684625 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:38.066033 4678 audit.go:125] 2017-01-25T05:12:38.065979352-05:00 AUDIT: id="8c38d48a-5cc9-4aff-a91a-fc56680aa351" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:38.067517 4678 audit.go:45] 2017-01-25T05:12:38.067501415-05:00 AUDIT: id="8c38d48a-5cc9-4aff-a91a-fc56680aa351" response="200" I0125 05:12:38.067968 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (5.12174ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:38.234114 4678 factory.go:104] Error trying to work out if we can handle /system.slice/dev-mqueue.mount: invalid container name I0125 05:12:38.234145 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/dev-mqueue.mount" I0125 05:12:38.234157 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/dev-mqueue.mount", but ignoring. 
I0125 05:12:38.234167 4678 manager.go:867] ignoring container "/system.slice/dev-mqueue.mount" I0125 05:12:38.234178 4678 factory.go:104] Error trying to work out if we can handle /system.slice/run-user-1000.mount: invalid container name I0125 05:12:38.234183 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/run-user-1000.mount" I0125 05:12:38.234190 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/run-user-1000.mount", but ignoring. I0125 05:12:38.234216 4678 manager.go:867] ignoring container "/system.slice/run-user-1000.mount" I0125 05:12:38.234230 4678 factory.go:104] Error trying to work out if we can handle /system.slice/dev-hugepages.mount: invalid container name I0125 05:12:38.234235 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/dev-hugepages.mount" I0125 05:12:38.234241 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/dev-hugepages.mount", but ignoring. I0125 05:12:38.234251 4678 manager.go:867] ignoring container "/system.slice/dev-hugepages.mount" I0125 05:12:38.234288 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b39c8e4d\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-deployer\x2dtoken\x2dr7jj8.mount: invalid container name I0125 05:12:38.234300 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b39c8e4d\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:12:38.234324 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b39c8e4d\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount", but ignoring. I0125 05:12:38.234339 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b39c8e4d\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:12:38.234382 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b3924f08\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-deployer\x2dtoken\x2dr7jj8.mount: invalid container name I0125 05:12:38.234387 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b3924f08\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:12:38.234399 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b3924f08\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount", but ignoring. 
I0125 05:12:38.234413 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b3924f08\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:12:38.234456 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-e932e61a\x2de2d9\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-registry\x2dtoken\x2dvjbst.mount: invalid container name I0125 05:12:38.234461 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount" I0125 05:12:38.234472 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount", but ignoring. I0125 05:12:38.234485 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount" I0125 05:12:38.234506 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir.mount: invalid container name I0125 05:12:38.234510 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount" I0125 05:12:38.234517 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount", but ignoring. I0125 05:12:38.234525 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount" I0125 05:12:38.234536 4678 factory.go:104] Error trying to work out if we can handle /system.slice/sys-kernel-debug.mount: invalid container name I0125 05:12:38.234540 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/sys-kernel-debug.mount" I0125 05:12:38.234546 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/sys-kernel-debug.mount", but ignoring. I0125 05:12:38.234557 4678 manager.go:867] ignoring container "/system.slice/sys-kernel-debug.mount" I0125 05:12:38.234595 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b6efaf68\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:12:38.234600 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b6efaf68\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:38.234612 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b6efaf68\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. 
I0125 05:12:38.234644 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b6efaf68\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:38.234689 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b76687cc\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:12:38.234694 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:38.234706 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. I0125 05:12:38.234719 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:38.234734 4678 factory.go:104] Error trying to work out if we can handle /system.slice/sys-kernel-config.mount: invalid container name I0125 05:12:38.234739 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/sys-kernel-config.mount" I0125 05:12:38.234745 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/sys-kernel-config.mount", but ignoring. I0125 05:12:38.234753 4678 manager.go:867] ignoring container "/system.slice/sys-kernel-config.mount" I0125 05:12:38.234788 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b63d7ff7\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:12:38.234793 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b63d7ff7\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:38.234804 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b63d7ff7\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. 
I0125 05:12:38.234822 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b63d7ff7\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:12:38.234862 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-04c98b55\x2de2da\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-router\x2dtoken\x2ds79l8.mount: invalid container name I0125 05:12:38.234870 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount" I0125 05:12:38.234882 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount", but ignoring. I0125 05:12:38.234894 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount" I0125 05:12:38.234906 4678 factory.go:104] Error trying to work out if we can handle /system.slice/-.mount: invalid container name I0125 05:12:38.234911 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/-.mount" I0125 05:12:38.234917 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/-.mount", but ignoring. I0125 05:12:38.234924 4678 manager.go:867] ignoring container "/system.slice/-.mount" I0125 05:12:38.234965 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-04c98b55\x2de2da\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-server\x2dcertificate.mount: invalid container name I0125 05:12:38.234970 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount" I0125 05:12:38.234981 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount", but ignoring. 
I0125 05:12:38.234995 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount" I0125 05:12:38.235022 4678 manager.go:955] Destroyed container: "/system.slice/docker-57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d.scope" (aliases: [k8s_deployment.440ed38e_postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094_ce4eccb1 57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d], namespace: "docker") I0125 05:12:38.235051 4678 handler.go:325] Added event &{/system.slice/docker-57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d.scope 2017-01-25 05:12:38.235041089 -0500 EST containerDeletion {}} I0125 05:12:38.235085 4678 manager.go:955] Destroyed container: "/system.slice/docker-c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b.scope" (aliases: [k8s_POD.f321dce3_postgresql-helper-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094_ae1dbcf3 c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b], namespace: "docker") I0125 05:12:38.235101 4678 handler.go:325] Added event &{/system.slice/docker-c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b.scope 2017-01-25 05:12:38.235093956 -0500 EST containerDeletion {}} I0125 05:12:38.235114 4678 manager.go:349] Global Housekeeping(1485339158) took 108.6809ms I0125 05:12:38.341474 4678 audit.go:125] 2017-01-25T05:12:38.34143137-05:00 AUDIT: id="b990a982-b8e5-4d1c-9b6f-cf5c52af8706" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:38.341981 4678 audit.go:45] 2017-01-25T05:12:38.341968613-05:00 AUDIT: id="b990a982-b8e5-4d1c-9b6f-cf5c52af8706" response="200" I0125 05:12:38.342363 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.269631ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:38.342775 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
I0125 05:12:38.377117 4678 audit.go:125] 2017-01-25T05:12:38.377078027-05:00 AUDIT: id="3d52fb46-ff05-42b2-b600-abd6d10cd7da" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:38.378245 4678 audit.go:45] 2017-01-25T05:12:38.378234371-05:00 AUDIT: id="3d52fb46-ff05-42b2-b600-abd6d10cd7da" response="200" I0125 05:12:38.378612 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (3.181804ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40566] I0125 05:12:38.387647 4678 audit.go:125] 2017-01-25T05:12:38.38759647-05:00 AUDIT: id="4f41c281-de3e-43a6-8d17-2c8dc8d3e3c7" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql-slave&container=postgresql-slave&stderr=true&stdout=true" I0125 05:12:38.388496 4678 audit.go:125] 2017-01-25T05:12:38.388473499-05:00 AUDIT: id="35562c33-9bd4-4c50-9a42-18ad0147616c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:38.389411 4678 audit.go:45] 2017-01-25T05:12:38.389400902-05:00 AUDIT: id="35562c33-9bd4-4c50-9a42-18ad0147616c" response="200" I0125 05:12:38.389502 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (1.218217ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:38.389790 4678 admission.go:77] getting security context constraints for pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{extended-test-postgresql-replication-1-34bbd-xd4g8-user b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:38.389821 4678 admission.go:88] getting security context constraints for pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:38.390273 4678 audit.go:125] 2017-01-25T05:12:38.390248317-05:00 AUDIT: id="831509a6-28b8-4018-ae6e-01c7ca5e3799" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:38.391044 4678 audit.go:45] 2017-01-25T05:12:38.391034398-05:00 AUDIT: id="831509a6-28b8-4018-ae6e-01c7ca5e3799" response="200" I0125 05:12:38.391099 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.022204ms) 200 
[[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:38.391335 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:38.391350 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:38.391354 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:38.391369 4678 admission.go:149] validating pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) against providers restricted I0125 05:12:38.391414 4678 admission.go:116] pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) validated against provider restricted I0125 05:12:38.402927 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave"} I0125 05:12:38.403594 4678 audit.go:125] 2017-01-25T05:12:38.403568811-05:00 AUDIT: id="b2e20b43-7814-4822-9af8-10e9fe5143c2" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/oapi/v1/subjectaccessreviews" I0125 05:12:38.404110 4678 audit.go:45] 2017-01-25T05:12:38.404096858-05:00 AUDIT: id="b2e20b43-7814-4822-9af8-10e9fe5143c2" response="201" I0125 05:12:38.404184 4678 panics.go:76] POST /oapi/v1/subjectaccessreviews: (794.663µs) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50864] I0125 05:12:38.404447 4678 authorizer.go:69] allowed=true, reason=allowed by cluster rule I0125 05:12:38.458230 4678 server.go:744] POST /exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&error=1&output=1: (55.514589ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41688] E0125 05:12:38.458758 4678 proxy.go:193] Error proxying data from client to backend: write tcp 172.18.7.222:41688->172.18.7.222:10250: write: broken pipe I0125 05:12:38.458865 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql-slave&container=postgresql-slave&stderr=true&stdout=true: (72.638126ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40568] I0125 05:12:38.530504 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:42009" with type 1 I0125 05:12:38.530553 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:38.530591 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:38.538321 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:57448" with type 28 I0125 05:12:38.538373 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:38.538402 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:57448" with type 1 I0125 05:12:38.538407 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:38.538434 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:38.538456 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:39.185507 4678 audit.go:125] 2017-01-25T05:12:39.185457736-05:00 AUDIT: id="ccdc6b13-5db7-4883-9e08-283942b9a4b2" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:12:39.186364 4678 audit.go:45] 2017-01-25T05:12:39.186349145-05:00 AUDIT: id="ccdc6b13-5db7-4883-9e08-283942b9a4b2" response="200" I0125 05:12:39.186443 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.964514ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:39.186714 4678 controller.go:106] Found 0 cronjobs I0125 05:12:39.188681 4678 audit.go:125] 2017-01-25T05:12:39.188658573-05:00 AUDIT: id="c91812c3-53e1-4adc-bf88-3dd09290c055" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:12:39.189495 4678 audit.go:45] 2017-01-25T05:12:39.189485547-05:00 AUDIT: id="c91812c3-53e1-4adc-bf88-3dd09290c055" response="200" I0125 05:12:39.189556 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.586106ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:39.189767 4678 controller.go:114] Found 0 jobs I0125 05:12:39.189775 4678 controller.go:117] Found 0 groups I0125 05:12:39.261015 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:39.261040 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:39.261740 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:39.261759 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:39.262707 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Connection:[close] Content-Type:[text/html] Cache-Control:[no-cache]] 0xc4280c65a0 -1 [] true false map[] 0xc42fc88780 } I0125 05:12:39.262768 4678 prober.go:113] 
Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:39.262859 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc4280c6680 -1 [] true false map[] 0xc42ee97d10 } I0125 05:12:39.262885 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:39.482673 4678 panics.go:76] GET /oapi/v1/watch/imagestreams?resourceVersion=10086&timeoutSeconds=306: (5m6.001099713s) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:39.482906 4678 reflector.go:392] github.com/openshift/origin/pkg/controller/shared/shared_informer.go:89: Watch close - *api.ImageStream total 5 items received I0125 05:12:39.483539 4678 audit.go:125] 2017-01-25T05:12:39.483507328-05:00 AUDIT: id="dfa4a05c-d2d3-450d-8b0a-e10c96378c39" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/watch/imagestreams?resourceVersion=10739&timeoutSeconds=451" I0125 05:12:39.484027 4678 audit.go:45] 2017-01-25T05:12:39.484015114-05:00 AUDIT: id="dfa4a05c-d2d3-450d-8b0a-e10c96378c39" response="200" I0125 05:12:39.548329 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:50771" with type 1 I0125 05:12:39.548380 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:39.548415 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:39.556274 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:42395" with type 28 I0125 05:12:39.556323 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:39.556362 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:39.556392 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:42395" with type 1 I0125 05:12:39.556422 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:39.556449 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:39.604443 4678 panics.go:76] GET /api/v1/watch/secrets?resourceVersion=10082&timeoutSeconds=324: (5m24.001073074s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:39.604695 4678 reflector.go:392] github.com/openshift/origin/pkg/quota/controller/clusterquotareconciliation/reconciliation_controller.go:120: Watch close - *api.Secret total 127 items received I0125 05:12:39.605295 4678 audit.go:125] 2017-01-25T05:12:39.605266417-05:00 AUDIT: id="593a8c4d-d87d-4c4f-88a3-0563c53368b5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/secrets?resourceVersion=10974&timeoutSeconds=391" I0125 05:12:39.605746 4678 audit.go:45] 2017-01-25T05:12:39.605727777-05:00 AUDIT: id="593a8c4d-d87d-4c4f-88a3-0563c53368b5" response="200" I0125 05:12:39.684609 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:40.022544 4678 audit.go:125] 2017-01-25T05:12:40.022507601-05:00 AUDIT: id="f20f2f77-42b3-4386-8642-e6141e18ef6b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:40.023568 4678 audit.go:45] 2017-01-25T05:12:40.023556949-05:00 AUDIT: id="f20f2f77-42b3-4386-8642-e6141e18ef6b" response="200" I0125 05:12:40.023926 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (3.006065ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:40.029587 4678 panics.go:76] GET /api/v1/watch/services?resourceVersion=9561&timeoutSeconds=591: (9m51.003132643s) 200 [[openshift-router/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:60456] I0125 05:12:40.032447 4678 audit.go:125] 2017-01-25T05:12:40.032403389-05:00 AUDIT: id="e1ed004e-5423-44f1-81f0-941012bdc2bd" ip="172.18.7.222" method="GET" user="system:serviceaccount:default:router" as="" asgroups="" namespace="" uri="/api/v1/watch/services?resourceVersion=10976&timeoutSeconds=534" I0125 05:12:40.032926 4678 audit.go:45] 2017-01-25T05:12:40.032909371-05:00 AUDIT: id="e1ed004e-5423-44f1-81f0-941012bdc2bd" response="200" I0125 05:12:40.215436 4678 audit.go:125] 2017-01-25T05:12:40.215401372-05:00 AUDIT: id="d6ccdb71-be4e-4c41-ab35-3357de35fc56" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:40.216464 4678 audit.go:45] 2017-01-25T05:12:40.216452591-05:00 AUDIT: id="d6ccdb71-be4e-4c41-ab35-3357de35fc56" response="200" I0125 05:12:40.216802 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (2.947562ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40582] I0125 05:12:40.225518 4678 
audit.go:125] 2017-01-25T05:12:40.225474411-05:00 AUDIT: id="35f17eb4-69e7-41fe-8aee-be6cbae67b51" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql-slave&container=postgresql-slave&stderr=true&stdout=true" I0125 05:12:40.226367 4678 audit.go:125] 2017-01-25T05:12:40.226335521-05:00 AUDIT: id="a75b9270-cccc-4d00-aed9-f47803aaa02d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:40.227303 4678 audit.go:45] 2017-01-25T05:12:40.227287114-05:00 AUDIT: id="a75b9270-cccc-4d00-aed9-f47803aaa02d" response="200" I0125 05:12:40.227393 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (1.24803ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:40.227671 4678 admission.go:77] getting security context constraints for pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{extended-test-postgresql-replication-1-34bbd-xd4g8-user b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:40.227697 4678 admission.go:88] getting security context constraints for pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:40.228175 4678 audit.go:125] 2017-01-25T05:12:40.228153919-05:00 AUDIT: id="6eb860e2-e474-40f1-8100-a9081f0b5a91" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:40.228978 4678 audit.go:45] 2017-01-25T05:12:40.228968653-05:00 AUDIT: id="6eb860e2-e474-40f1-8100-a9081f0b5a91" response="200" I0125 05:12:40.229027 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.030822ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:40.229229 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:40.229237 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:40.229242 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:40.229263 4678 admission.go:149] validating pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) against providers restricted I0125 05:12:40.229304 4678 admission.go:116] pod postgresql-slave-1-qt1rc (generate: 
postgresql-slave-1-) validated against provider restricted I0125 05:12:40.240746 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave"} I0125 05:12:40.303984 4678 server.go:744] POST /exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&error=1&output=1: (63.431088ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41704] I0125 05:12:40.305413 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql-slave&container=postgresql-slave&stderr=true&stdout=true: (81.288952ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40584] I0125 05:12:40.566611 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:60652" with type 1 I0125 05:12:40.566664 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:40.566696 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:40.574428 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:53568" with type 28 I0125 05:12:40.574468 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:40.574504 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:40.574570 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:53568" with type 1 I0125 05:12:40.574591 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:40.574621 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:40.932532 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-slave centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-slave] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_SERVICE_NAME postgresql-master } {POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc42cb02330 0xc42cb02360 /dev/termination-log IfNotPresent 0xc42cb02390 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:12:41.007332 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\npsql: could not connect to server: Connection refused\n\tIs the server running on host \"127.0.0.1\" and accepting\n\tTCP/IP connections on port 5432?\n" I0125 05:12:41.007381 4678 prober.go:106] Readiness probe for "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-slave" failed (failure): sh: cannot set terminal process group (-1): Inappropriate ioctl for device sh: no job control in this shell psql: could not connect to server: Connection refused Is the server running on host "127.0.0.1" and accepting TCP/IP connections on port 5432? I0125 05:12:41.007443 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11002", FieldPath:"spec.containers{postgresql-slave}"}): type: 'Warning' reason: 'Unhealthy' Readiness probe failed: sh: cannot set terminal process group (-1): Inappropriate ioctl for device sh: no job control in this shell psql: could not connect to server: Connection refused Is the server running on host "127.0.0.1" and accepting TCP/IP connections on port 5432? I0125 05:12:41.008173 4678 audit.go:125] 2017-01-25T05:12:41.008142222-05:00 AUDIT: id="c3153c1b-2a3f-4ef5-bde7-56940fd6a4fa" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:41.010388 4678 audit.go:45] 2017-01-25T05:12:41.010374083-05:00 AUDIT: id="c3153c1b-2a3f-4ef5-bde7-56940fd6a4fa" response="201" I0125 05:12:41.010438 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (2.507135ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:41.584544 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:33401" with type 1 I0125 05:12:41.584591 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:41.584621 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:41.592554 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:35437" with type 28 I0125 05:12:41.592598 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:41.592626 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:41.592700 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:35437" with type 1 I0125 05:12:41.592732 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:41.592755 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{} I0125 05:12:41.684614 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:41.925047 4678 audit.go:125] 2017-01-25T05:12:41.925014103-05:00 AUDIT: id="8b6d29ab-f9dc-445b-97e7-52f84b9ae932" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:12:41.926143 4678 audit.go:45] 2017-01-25T05:12:41.9261274-05:00 AUDIT: id="8b6d29ab-f9dc-445b-97e7-52f84b9ae932" response="200" I0125 05:12:41.926240 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.432364ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:42.022600 4678 audit.go:125] 2017-01-25T05:12:42.022553921-05:00 AUDIT: id="afbf6e8a-5cf9-491a-b431-ae09e6da31c1" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:42.023642 4678 audit.go:45] 2017-01-25T05:12:42.02363063-05:00 AUDIT: id="afbf6e8a-5cf9-491a-b431-ae09e6da31c1" response="200" I0125 05:12:42.023988 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (3.081143ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:42.110369 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD X5NgRSrwacHP }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc4269cff50 0xc4269cff80 /dev/termination-log 
IfNotPresent 0xc4269cffb0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:12:42.175392 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:12:42.175424 4678 prober.go:113] Readiness probe for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:12:42.176241 4678 audit.go:125] 2017-01-25T05:12:42.176181887-05:00 AUDIT: id="96395669-1573-419f-8985-bd77cd60ee4a" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:42.177570 4678 audit.go:45] 2017-01-25T05:12:42.177555385-05:00 AUDIT: id="96395669-1573-419f-8985-bd77cd60ee4a" response="200" I0125 05:12:42.177686 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (1.790376ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:42.178786 4678 audit.go:125] 2017-01-25T05:12:42.178748428-05:00 AUDIT: id="e5724c27-227c-494f-81f8-ab0b962939af" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/status" I0125 05:12:42.181260 4678 audit.go:45] 2017-01-25T05:12:42.181244736-05:00 AUDIT: id="e5724c27-227c-494f-81f8-ab0b962939af" response="200" I0125 05:12:42.181352 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/status: (2.852917ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:42.182665 4678 replication_controller.go:378] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11074 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-1] Annotations:map[openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 
SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11080 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:42.182876 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 1->1 (need 1), fullyLabeledReplicas 1->1, readyReplicas 0->1, availableReplicas 0->1, sequence No: 2->2 I0125 05:12:42.183182 4678 replica_set.go:320] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11074 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11080 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master] Annotations:map[openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} ] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:42.183315 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-6jfgj, ReplicaSet controller will avoid syncing I0125 05:12:42.183348 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-6jfgj, job controller will avoid syncing I0125 05:12:42.183375 4678 daemoncontroller.go:332] Pod postgresql-master-1-6jfgj updated. I0125 05:12:42.183404 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-6jfgj, daemon set controller will avoid syncing I0125 05:12:42.183427 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-6jfgj" I0125 05:12:42.183444 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-6jfgj, PodDisruptionBudget controller will avoid syncing. I0125 05:12:42.183450 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-6jfgj" I0125 05:12:42.183713 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-6jfgj, StatefulSet controller will avoid syncing I0125 05:12:42.183869 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:42.183857928 -0500 EST. I0125 05:12:42.184434 4678 status_manager.go:425] Status for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935962 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.7 StartTime:0xc4283337a0 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running:0xc42b7757a0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f}]} version:3 podName:postgresql-master-1-6jfgj podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:42.184629 4678 config.go:281] Setting pods for source api I0125 05:12:42.186124 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:42.190359 4678 audit.go:125] 2017-01-25T05:12:42.190314797-05:00 AUDIT: id="aea92c58-8f4e-42bf-861e-2efc96110d15" ip="172.18.7.222" method="PUT" 
user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:12:42.191537 4678 audit.go:125] 2017-01-25T05:12:42.19149956-05:00 AUDIT: id="82c6c6a1-4d3c-4506-877f-2452fc603789" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:42.194344 4678 audit.go:45] 2017-01-25T05:12:42.194329497-05:00 AUDIT: id="82c6c6a1-4d3c-4506-877f-2452fc603789" response="200" I0125 05:12:42.194421 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (7.122747ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:42.194669 4678 audit.go:45] 2017-01-25T05:12:42.194656658-05:00 AUDIT: id="aea92c58-8f4e-42bf-861e-2efc96110d15" response="200" I0125 05:12:42.195809 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (9.106286ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:42.196546 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 1 not ready: 0 I0125 05:12:42.196884 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (14.054969ms) I0125 05:12:42.197030 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. 
Desired pod count change: 1->1 I0125 05:12:42.197099 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (42.599µs) I0125 05:12:42.199575 4678 audit.go:125] 2017-01-25T05:12:42.199532431-05:00 AUDIT: id="a2ed3385-4e2c-46ba-8bca-43995b2a6cf9" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:12:42.201680 4678 audit.go:45] 2017-01-25T05:12:42.201666284-05:00 AUDIT: id="a2ed3385-4e2c-46ba-8bca-43995b2a6cf9" response="200" I0125 05:12:42.201742 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (4.189113ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:42.202260 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:12:42.202330 4678 roundrobin.go:257] LoadBalancerRR: Setting endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master: to [172.17.0.7:5432] I0125 05:12:42.202353 4678 roundrobin.go:83] LoadBalancerRR service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master:" did not exist, created I0125 05:12:42.202426 4678 proxier.go:616] Setting endpoints for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master:" to [172.17.0.7:5432] I0125 05:12:42.202483 4678 proxier.go:804] Syncing iptables rules I0125 05:12:42.202494 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:42.219662 4678 factory.go:154] Replication controller "postgresql-master-1" updated. I0125 05:12:42.220182 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. 
(36.66881ms) I0125 05:12:42.220852 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:42.220905 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:42.220922 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:42.220932 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:42.220947 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master] I0125 05:12:42.220972 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:42.220988 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:42.220997 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:12:42.221009 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:12:42.221801 4678 audit.go:125] 2017-01-25T05:12:42.221721115-05:00 AUDIT: id="9054e732-ca10-4ad1-9e0a-8ab0480389df" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:12:42.229454 4678 audit.go:45] 2017-01-25T05:12:42.229431966-05:00 AUDIT: id="9054e732-ca10-4ad1-9e0a-8ab0480389df" response="200" I0125 05:12:42.229632 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (8.335288ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:42.230267 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 2) I0125 05:12:42.230418 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:42.230405822 -0500 EST. 
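Annotation: the "LoadBalancerRR: Setting endpoints for .../postgresql-master: to [172.17.0.7:5432]" and proxier.go lines above show the userspace proxy learning the postgresql-master backend once the pod turns Ready, after the earlier empty []msg.Service{} answers. The sketch below is not the kube-proxy source; it is a minimal, self-contained Go illustration of that round-robin bookkeeping, with invented names (roundRobin, setEndpoints, pick) used only for the example.

```go
// Minimal sketch (assumed structure, not kube-proxy code) of round-robin
// endpoint selection: an empty backend list before readiness, then a single
// backend once the endpoints object is updated.
package main

import "fmt"

type roundRobin struct {
	endpoints []string // current backends for one service
	next      int      // index of the next backend to hand out
}

// setEndpoints replaces the backend list when the service's endpoints change.
func (rr *roundRobin) setEndpoints(eps []string) {
	rr.endpoints = append([]string(nil), eps...)
	rr.next = 0
}

// pick returns the next backend, wrapping around; false means no ready backends.
func (rr *roundRobin) pick() (string, bool) {
	if len(rr.endpoints) == 0 {
		return "", false
	}
	ep := rr.endpoints[rr.next%len(rr.endpoints)]
	rr.next++
	return ep, true
}

func main() {
	rr := &roundRobin{}
	if _, ok := rr.pick(); !ok {
		fmt.Println("no ready endpoints yet")
	}
	rr.setEndpoints([]string{"172.17.0.7:5432"})
	for i := 0; i < 3; i++ {
		ep, _ := rr.pick()
		fmt.Println("routing to", ep)
	}
}
```

Running the sketch prints "no ready endpoints yet" followed by three routes to 172.17.0.7:5432, matching the transition visible in the log once the master pod's readiness probe succeeds.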
I0125 05:12:42.231892 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:12:42.235770 4678 audit.go:125] 2017-01-25T05:12:42.235701325-05:00 AUDIT: id="8837cabf-592d-4520-bb2f-cbd1400007f2" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:12:42.237399 4678 audit.go:45] 2017-01-25T05:12:42.237385723-05:00 AUDIT: id="8837cabf-592d-4520-bb2f-cbd1400007f2" response="409" I0125 05:12:42.237481 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (2.175647ms) 409 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:42.237921 4678 controller.go:294] Cannot update the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master": Operation cannot be fulfilled on deploymentconfigs "postgresql-master": the object has been modified; please apply your changes to the latest version and try again I0125 05:12:42.237943 4678 controller.go:393] Error syncing deployment config extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master: Operation cannot be fulfilled on deploymentconfigs "postgresql-master": the object has been modified; please apply your changes to the latest version and try again I0125 05:12:42.238106 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:42.238082124 -0500 EST. I0125 05:12:42.239059 4678 audit.go:125] 2017-01-25T05:12:42.23901794-05:00 AUDIT: id="7021f716-3f74-4958-a88d-0a8337506036" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:12:42.241887 4678 audit.go:45] 2017-01-25T05:12:42.241874206-05:00 AUDIT: id="7021f716-3f74-4958-a88d-0a8337506036" response="200" I0125 05:12:42.242003 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (3.194899ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:42.242429 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 2) I0125 05:12:42.243160 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:12:42.243301 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:42.243290507 -0500 EST. I0125 05:12:42.255764 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:42.255742887 -0500 EST. 
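Annotation: the 409 on the first PUT .../deploymentconfigs/postgresql-master/status ("the object has been modified; please apply your changes to the latest version and try again") followed immediately by a successful retry is the standard optimistic-concurrency pattern: the object's resourceVersion moved between read and write, so the controller re-reads the latest copy and re-applies its change. The Go sketch below is a self-contained simulation of that loop; deploymentConfig, getLatest, and putStatus are illustrative stand-ins, not the OpenShift controller or client-go API (in real controllers a helper such as client-go's retry.RetryOnConflict typically plays this role).

```go
// Self-contained simulation (assumed names, not real API calls) of the
// conflict-then-retry sequence visible in the log above.
package main

import (
	"errors"
	"fmt"
)

var errConflict = errors.New("409: the object has been modified; please apply your changes to the latest version and try again")

type deploymentConfig struct {
	ResourceVersion    string
	ObservedGeneration int64
}

// store stands in for the API server's current copy of the object.
var store = deploymentConfig{ResourceVersion: "11080", ObservedGeneration: 1}

func getLatest() deploymentConfig { return store }

// putStatus rejects writes made from a stale resourceVersion, like the 409 above.
func putStatus(dc deploymentConfig) error {
	if dc.ResourceVersion != store.ResourceVersion {
		return errConflict
	}
	store = dc
	return nil
}

func main() {
	// The controller's cached copy was read before another writer bumped the object.
	dc := deploymentConfig{ResourceVersion: "11074", ObservedGeneration: 2}
	for attempt := 1; attempt <= 3; attempt++ {
		err := putStatus(dc)
		if err == nil {
			fmt.Printf("attempt %d: updated the status (observed generation: %d)\n", attempt, store.ObservedGeneration)
			return
		}
		if errors.Is(err, errConflict) {
			fmt.Printf("attempt %d: %v\n", attempt, err)
			dc = getLatest()          // re-read the latest resourceVersion
			dc.ObservedGeneration = 2 // re-apply the intended status change
			continue
		}
		panic(err)
	}
}
```

The first attempt fails exactly as in the log, the second succeeds against the refreshed copy; the error itself is benign as long as the controller eventually converges, which the second "Updated the status ... (observed generation: 2)" entry above confirms.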
I0125 05:12:42.256183 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:42.280669 4678 audit.go:125] 2017-01-25T05:12:42.280617969-05:00 AUDIT: id="42955f54-31ec-45f3-ab4c-aa375f2f61ba" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:42.282283 4678 audit.go:45] 2017-01-25T05:12:42.282269842-05:00 AUDIT: id="42955f54-31ec-45f3-ab4c-aa375f2f61ba" response="200" I0125 05:12:42.282766 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (4.790771ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40594] I0125 05:12:42.292319 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:42.322955 4678 audit.go:125] 2017-01-25T05:12:42.322893305-05:00 AUDIT: id="6bc1344c-5fd3-4494-aed5-bf7e8064b4ba" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql-slave&container=postgresql-slave&stderr=true&stdout=true" I0125 05:12:42.323910 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:42.324083 4678 audit.go:125] 2017-01-25T05:12:42.324052787-05:00 AUDIT: id="33da72f2-d428-4c0b-8312-5063b0334783" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:42.336277 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-1%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master&resourceVersion=11023&timeoutSeconds=571: (40.017846802s) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50088] I0125 05:12:42.337575 4678 audit.go:45] 2017-01-25T05:12:42.337556325-05:00 AUDIT: id="33da72f2-d428-4c0b-8312-5063b0334783" response="200" I0125 05:12:42.337721 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (13.916822ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:42.338245 4678 admission.go:77] getting security context constraints for pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{extended-test-postgresql-replication-1-34bbd-xd4g8-user b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:42.338291 4678 admission.go:88] getting security context constraints for pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) with service account info 
&{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:42.339639 4678 audit.go:125] 2017-01-25T05:12:42.339606918-05:00 AUDIT: id="87ae6ba4-73e6-466e-8afa-9c5ec47557e0" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:42.341027 4678 audit.go:125] 2017-01-25T05:12:42.340980073-05:00 AUDIT: id="47405aa3-1052-4f37-94db-0b19f25a235f" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events?fieldSelector=involvedObject.kind%3DReplicationController%2CinvolvedObject.name%3Dpostgresql-master-1%2CinvolvedObject.namespace%3Dextended-test-postgresql-replication-1-34bbd-xd4g8%2CinvolvedObject.uid%3Db383709d-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:12:42.342590 4678 audit.go:45] 2017-01-25T05:12:42.342576809-05:00 AUDIT: id="87ae6ba4-73e6-466e-8afa-9c5ec47557e0" response="200" I0125 05:12:42.342649 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (3.301626ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:42.342879 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:42.342889 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:42.342895 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:42.342917 4678 admission.go:149] validating pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) against providers restricted I0125 05:12:42.342988 4678 admission.go:116] pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) validated against provider restricted I0125 05:12:42.344349 4678 audit.go:45] 2017-01-25T05:12:42.344335888-05:00 AUDIT: id="47405aa3-1052-4f37-94db-0b19f25a235f" response="200" I0125 05:12:42.344458 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events?fieldSelector=involvedObject.kind%3DReplicationController%2CinvolvedObject.name%3Dpostgresql-master-1%2CinvolvedObject.namespace%3Dextended-test-postgresql-replication-1-34bbd-xd4g8%2CinvolvedObject.uid%3Db383709d-e2e6-11e6-a4b0-0e6a5cbf0094: (8.874023ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50088] I0125 05:12:42.351456 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:42.378184 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, 
URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave"} I0125 05:12:42.384102 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:42.414360 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:42.451286 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:42.488261 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:42.522764 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment 
default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:42.522808 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:42.533388 4678 server.go:744] POST 
/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&error=1&output=1: (155.46739ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41716] I0125 05:12:42.539104 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql-slave&container=postgresql-slave&stderr=true&stdout=true: (218.252746ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40596] I0125 05:12:42.547622 4678 proxier.go:797] syncProxyRules took 345.134682ms I0125 05:12:42.547655 4678 proxier.go:566] OnEndpointsUpdate took 345.275038ms for 6 endpoints I0125 05:12:42.547692 4678 proxier.go:381] Received update notice: [] I0125 05:12:42.547725 4678 proxier.go:804] Syncing iptables rules I0125 05:12:42.547733 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:42.567688 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:12:42.587071 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:42.606397 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:54457" with type 1 I0125 05:12:42.606518 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:42.606594 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{msg.Service{Host:"172.17.0.7", Port:0, Priority:10, Weight:10, Text:"", Mail:false, Ttl:0x1e, TargetStrip:0, Group:"", Key:"/skydns/local/cluster/svc/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master/8e89f024"}} I0125 05:12:42.613889 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:42.625007 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:53576" with type 28 I0125 05:12:42.625102 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:42.625188 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{msg.Service{Host:"172.17.0.7", Port:0, Priority:10, Weight:10, Text:"", Mail:false, Ttl:0x1e, TargetStrip:0, Group:"", Key:"/skydns/local/cluster/svc/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master/8e89f024"}} I0125 05:12:42.625528 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:53576" with type 1 I0125 05:12:42.625559 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:42.625588 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{msg.Service{Host:"172.17.0.7", Port:0, Priority:10, Weight:10, Text:"", Mail:false, Ttl:0x1e, TargetStrip:0, Group:"", Key:"/skydns/local/cluster/svc/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master/8e89f024"}} I0125 05:12:42.642319 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:12:42.652882 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:59820" with type 1 I0125 05:12:42.653138 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:42.653322 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{msg.Service{Host:"172.17.0.7", Port:0, Priority:10, Weight:10, Text:"", Mail:false, Ttl:0x1e, TargetStrip:0, Group:"", Key:"/skydns/local/cluster/svc/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master/8e89f024"}} I0125 05:12:42.667235 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:12:42.686877 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:12:42.705475 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:12:42.725230 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:12:42.746127 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j 
DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A 
KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:12:42.746163 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:12:42.767686 4678 proxier.go:797] syncProxyRules took 219.952218ms I0125 05:12:42.767716 4678 proxier.go:431] OnServiceUpdate took 220.012807ms for 4 services I0125 05:12:42.839576 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:60429" with type 1 I0125 05:12:42.839645 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:42.839705 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{msg.Service{Host:"172.17.0.7", Port:0, Priority:10, Weight:10, Text:"", Mail:false, Ttl:0x1e, TargetStrip:0, Group:"", Key:"/skydns/local/cluster/svc/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master/8e89f024"}} I0125 05:12:42.839993 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:60429" with type 28 I0125 05:12:42.840032 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:42.840074 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{msg.Service{Host:"172.17.0.7", Port:0, Priority:10, Weight:10, Text:"", Mail:false, Ttl:0x1e, TargetStrip:0, Group:"", Key:"/skydns/local/cluster/svc/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master/8e89f024"}} I0125 05:12:42.904475 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:12:42.905084 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:12:42.905320 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:12:42.967305 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:42.967332 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:42.967929 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:42.967947 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:42.968759 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:12:42 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42df1e560 0 [] true false map[] 0xc434ec4780 } I0125 05:12:42.968814 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:42.968918 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:12:42 GMT]] 0xc42df1e680 0 [] true false map[] 0xc434ec45a0 } I0125 05:12:42.968950 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:43.000141 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:12:43.000162 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:12:43.317321 4678 generic.go:145] GenericPLEG: b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f: running -> exited I0125 05:12:43.323917 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc436ea18c0 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/04ad5900 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42e261b00 NetworkSettings:0xc429d93600} I0125 05:12:43.328356 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc4283ae9a0 Mounts:[] Config:0xc42616de60 NetworkSettings:0xc42da05400} I0125 05:12:43.332460 4678 generic.go:342] PLEG: Write status for postgresql-master-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.4", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42a6e76c0), (*container.ContainerStatus)(0xc424890c40)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:43.332537 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f"} I0125 05:12:43.332607 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:43.332634 4678 helpers.go:78] Already ran container "deployment" of pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)", do nothing I0125 05:12:43.332772 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:43.333599 4678 audit.go:125] 2017-01-25T05:12:43.333561241-05:00 AUDIT: id="891ee966-190c-480e-a5ab-e362d7c6bf80" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy" I0125 05:12:43.335059 4678 audit.go:45] 2017-01-25T05:12:43.335043093-05:00 AUDIT: id="891ee966-190c-480e-a5ab-e362d7c6bf80" response="200" I0125 05:12:43.335172 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy: (1.916474ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) 
kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:43.336283 4678 audit.go:125] 2017-01-25T05:12:43.336249709-05:00 AUDIT: id="c8ac469e-3a89-4d63-aa2c-c21d2b97f5b3" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy/status" I0125 05:12:43.338457 4678 audit.go:45] 2017-01-25T05:12:43.338442891-05:00 AUDIT: id="c8ac469e-3a89-4d63-aa2c-c21d2b97f5b3" response="200" I0125 05:12:43.338531 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy/status: (2.52983ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:43.339447 4678 replication_controller.go:378] Pod postgresql-master-1-deploy updated, objectMeta {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11037 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11085 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
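Aside on the proxier entries earlier in this stretch (iptables.go:298/339, proxier.go:797): kube-proxy regenerates the KUBE-SERVICES -> KUBE-SVC-* -> KUBE-SEP-* NAT chains and feeds them to iptables-restore, which is the big *nat payload dumped above. A minimal Go sketch for inspecting the resulting rules for one service chain, shelling out to the same iptables-save -t nat command the log shows; the chain name KUBE-SVC-T2TLQTY2NRIUTPUX is copied from the dump (postgresql-helper:postgresql), and running this requires root on the node:

package main

import (
	"bufio"
	"bytes"
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	// Dump the NAT table, as kube-proxy itself does before rebuilding it.
	out, err := exec.Command("iptables-save", "-t", "nat").Output()
	if err != nil {
		fmt.Println("iptables-save failed:", err)
		return
	}
	// Print only the rules that mention the service chain from the restore payload above.
	const chain = "KUBE-SVC-T2TLQTY2NRIUTPUX" // postgresql-helper:postgresql cluster IP 172.30.122.147:5432
	sc := bufio.NewScanner(bytes.NewReader(out))
	for sc.Scan() {
		if strings.Contains(sc.Text(), chain) {
			fmt.Println(sc.Text())
		}
	}
}

Filtering on KUBE-SEP-5EBQIEXSJBX7BRLN instead would show the endpoint-level KUBE-MARK-MASQ and DNAT to 172.17.0.8:5432.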
I0125 05:12:43.339580 4678 replication_controller.go:255] No controllers found for pod postgresql-master-1-deploy, replication manager will avoid syncing I0125 05:12:43.339606 4678 replica_set.go:320] Pod postgresql-master-1-deploy updated, objectMeta {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11037 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11085 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:43.339672 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-deploy, ReplicaSet controller will avoid syncing I0125 05:12:43.339703 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-deploy, job controller will avoid syncing I0125 05:12:43.339726 4678 daemoncontroller.go:332] Pod postgresql-master-1-deploy updated. I0125 05:12:43.339755 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-deploy, daemon set controller will avoid syncing I0125 05:12:43.339773 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-deploy" I0125 05:12:43.339787 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-deploy, PodDisruptionBudget controller will avoid syncing. 
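The prober.go entries a few lines up record the three probe styles the kubelet is running here: a TCP probe on 172.17.0.8:5432, an exec probe that runs psql ... -c 'SELECT 1' inside the postgresql container, and HTTP probes against http://172.17.0.3:5000/healthz for the registry. A rough stand-alone Go sketch of the HTTP and exec checks (not the kubelet's prober code; the URL and psql command are copied from the log, and the exec variant assumes psql plus the POSTGRESQL_* environment variables are available wherever it runs):

package main

import (
	"fmt"
	"net/http"
	"os"
	"os/exec"
	"time"
)

// httpHealthz mirrors the HTTP-Probe lines: GET the endpoint and treat 2xx/3xx as success,
// which is how the kubelet's HTTP probe judges the response.
func httpHealthz(url string) error {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

// psqlSelectOne mirrors the Exec-Probe command from the postgresql readiness probe above.
func psqlSelectOne() error {
	cmd := exec.Command("psql",
		"-h", "127.0.0.1",
		"-U", os.Getenv("POSTGRESQL_USER"),
		"-q",
		"-d", os.Getenv("POSTGRESQL_DATABASE"),
		"-c", "SELECT 1")
	return cmd.Run()
}

func main() {
	fmt.Println("registry healthz:", httpHealthz("http://172.17.0.3:5000/healthz"))
	fmt.Println("postgresql SELECT 1:", psqlSelectOne())
}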
I0125 05:12:43.339792 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-deploy" I0125 05:12:43.339866 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-deploy, StatefulSet controller will avoid syncing I0125 05:12:43.341047 4678 status_manager.go:425] Status for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Succeeded Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason:PodCompleted Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935963 nsec:0 loc:0xa2479e0}} Reason:PodCompleted Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.4 StartTime:0xc432769900 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running: Terminated:0xc42c0f4b60} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f}]} version:3 podName:postgresql-master-1-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:43.341358 4678 config.go:281] Setting pods for source api I0125 05:12:43.342890 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:43.343634 4678 audit.go:125] 2017-01-25T05:12:43.343601974-05:00 AUDIT: id="76547f84-131a-4be5-af56-339e410c5d3a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:43.344036 4678 audit.go:125] 2017-01-25T05:12:43.344002699-05:00 AUDIT: id="c609ed25-5d89-4c94-b108-c83d76b76e18" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:12:43.344119 4678 audit.go:45] 2017-01-25T05:12:43.344102105-05:00 AUDIT: id="76547f84-131a-4be5-af56-339e410c5d3a" response="200" I0125 05:12:43.344426 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.069431ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:43.347250 4678 audit.go:45] 2017-01-25T05:12:43.347235588-05:00 AUDIT: id="c609ed25-5d89-4c94-b108-c83d76b76e18" response="200" I0125 05:12:43.348540 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (7.065901ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:12:43.348688 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." 
from "172.17.0.6:54819" with type 1 I0125 05:12:43.348736 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:43.348797 4678 factory.go:154] Replication controller "postgresql-master-1" updated. I0125 05:12:43.348776 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{msg.Service{Host:"172.17.0.7", Port:0, Priority:10, Weight:10, Text:"", Mail:false, Ttl:0x1e, TargetStrip:0, Group:"", Key:"/skydns/local/cluster/svc/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master/8e89f024"}} I0125 05:12:43.348919 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:43.348906647 -0500 EST. I0125 05:12:43.348950 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.6:54819" with type 28 I0125 05:12:43.348985 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:43.349017 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{msg.Service{Host:"172.17.0.7", Port:0, Priority:10, Weight:10, Text:"", Mail:false, Ttl:0x1e, TargetStrip:0, Group:"", Key:"/skydns/local/cluster/svc/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master/8e89f024"}} I0125 05:12:43.349344 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. 
Desired pod count change: 1->1 I0125 05:12:43.349414 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (37.152µs) I0125 05:12:43.349841 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 status from Running to Complete (scale: 1) I0125 05:12:43.349965 4678 audit.go:125] 2017-01-25T05:12:43.349927239-05:00 AUDIT: id="98aadcff-0c0d-43c6-8f1e-e08e3f0ba8d7" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:12:43.352288 4678 audit.go:125] 2017-01-25T05:12:43.352242237-05:00 AUDIT: id="534faba7-4e33-47c5-8921-a65618a57fd4" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy" I0125 05:12:43.352622 4678 audit.go:45] 2017-01-25T05:12:43.352609331-05:00 AUDIT: id="98aadcff-0c0d-43c6-8f1e-e08e3f0ba8d7" response="200" I0125 05:12:43.352737 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (3.066119ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:43.353088 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 2) I0125 05:12:43.353840 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:12:43.353932 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:43.353920557 -0500 EST. I0125 05:12:43.355410 4678 replication_controller.go:378] Pod postgresql-master-1-deploy updated, objectMeta {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11085 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11088 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp:2017-01-25 05:12:43.353565975 -0500 EST DeletionGracePeriodSeconds:0xc42bfa0600 Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
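The skydns/serviceresolver entries throughout this stretch show the slave pod (172.17.0.6) repeatedly resolving postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local, with type 1 (A) and type 28 (AAAA) queries both answered from the single service endpoint 172.17.0.7. A small Go sketch of the same lookup as a pod would perform it; the name is copied verbatim from the log and only resolves from inside that cluster's DNS:

package main

import (
	"context"
	"fmt"
	"net"
	"time"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()
	// LookupIPAddr issues A and AAAA queries (DNS types 1 and 28), matching the skydns log lines.
	const name = "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local"
	addrs, err := net.DefaultResolver.LookupIPAddr(ctx, name)
	if err != nil {
		fmt.Println("lookup failed:", err)
		return
	}
	for _, a := range addrs {
		fmt.Println(a.IP) // expected: 172.17.0.7, per the serviceresolver answers above
	}
}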
I0125 05:12:43.355481 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:12:43.353565975 -0500 EST, labels map[openshift.io/deployer-pod-for.name:postgresql-master-1]. I0125 05:12:43.355541 4678 replication_controller.go:255] No controllers found for pod postgresql-master-1-deploy, replication manager will avoid syncing I0125 05:12:43.355563 4678 replica_set.go:320] Pod postgresql-master-1-deploy updated, objectMeta {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11085 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy UID:b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11088 Generation:0 CreationTimestamp:2017-01-25 05:11:56.444439356 -0500 EST DeletionTimestamp:2017-01-25 05:12:43.353565975 -0500 EST DeletionGracePeriodSeconds:0xc42bfa0600 Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:12:43.355613 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:12:43.353565975 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-1-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy", UID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11088", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935916, nsec:444439356, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc428483d00), DeletionGracePeriodSeconds:(*int64)(0xc42bfa0600), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-master-1"}, Annotations:map[string]string{"openshift.io/deployment.name":"postgresql-master-1", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42db881e0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN 
CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-master-1", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42db88540), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc42bfa06d0), ActiveDeadlineSeconds:(*int64)(0xc42bfa06d8), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc426e12800), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Succeeded", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935916, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935963, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935916, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.4", StartTime:(*unversioned.Time)(0xc428483f80), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"deployment", 
State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc42e8c0620)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"openshift/origin-deployer:86a9783", ImageID:"docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8", ContainerID:"docker://a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f"}}}}. I0125 05:12:43.355892 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-deploy, ReplicaSet controller will avoid syncing I0125 05:12:43.355921 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-deploy, job controller will avoid syncing I0125 05:12:43.355935 4678 daemoncontroller.go:332] Pod postgresql-master-1-deploy updated. I0125 05:12:43.355958 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-deploy, daemon set controller will avoid syncing I0125 05:12:43.355974 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-deploy" I0125 05:12:43.355988 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:12:43.355993 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-deploy" I0125 05:12:43.356056 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-deploy, StatefulSet controller will avoid syncing I0125 05:12:43.356678 4678 config.go:281] Setting pods for source api I0125 05:12:43.357830 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:43.358484 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:43.358520 4678 audit.go:125] 2017-01-25T05:12:43.358486744-05:00 AUDIT: id="ac18b7be-177e-4fda-b649-46ad46616819" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy" I0125 05:12:43.359776 4678 audit.go:125] 2017-01-25T05:12:43.35974069-05:00 AUDIT: id="e42a3aa4-173c-4bb2-83c2-814052871a40" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:12:43.360779 4678 audit.go:45] 2017-01-25T05:12:43.36076485-05:00 AUDIT: id="ac18b7be-177e-4fda-b649-46ad46616819" response="200" I0125 05:12:43.360802 4678 audit.go:45] 2017-01-25T05:12:43.360791621-05:00 AUDIT: id="e42a3aa4-173c-4bb2-83c2-814052871a40" response="200" I0125 05:12:43.360856 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy: (2.620835ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:43.361005 4678 audit.go:45] 2017-01-25T05:12:43.360989331-05:00 AUDIT: id="534faba7-4e33-47c5-8921-a65618a57fd4" response="200" 
I0125 05:12:43.361067 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (1.560875ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:43.361076 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy: (10.778626ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:12:43.361573 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.deletePod)-fm, timestamp 2017-01-25 05:12:43.353565975 -0500 EST, labels map[openshift.io/deployer-pod-for.name:postgresql-master-1]. I0125 05:12:43.361606 4678 config.go:281] Setting pods for source api I0125 05:12:43.361637 4678 replication_controller.go:255] No controllers found for pod postgresql-master-1-deploy, replication manager will avoid syncing I0125 05:12:43.361654 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.deletePod)-fm, timestamp 2017-01-25 05:12:43.353565975 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-1-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy", UID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11089", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935916, nsec:444439356, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42e63a140), DeletionGracePeriodSeconds:(*int64)(0xc4299d8438), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-master-1"}, Annotations:map[string]string{"openshift.io/scc":"restricted", "openshift.io/deployment.name":"postgresql-master-1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc435a7b440), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), 
ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-master-1", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc435a7b500), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc4299d8700), ActiveDeadlineSeconds:(*int64)(0xc4299d8708), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc43076aec0), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Succeeded", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935916, nsec:0, loc:(*time.Location)(0xa2479e0)}}, 
Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935963, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935916, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.4", StartTime:(*unversioned.Time)(0xc42e63af20), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"deployment", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc429cd8150)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"openshift/origin-deployer:86a9783", ImageID:"docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8", ContainerID:"docker://a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f"}}}}. I0125 05:12:43.361939 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-deploy, ReplicaSet controller will avoid syncing I0125 05:12:43.361964 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-deploy, job controller will avoid syncing I0125 05:12:43.361976 4678 daemoncontroller.go:367] Pod postgresql-master-1-deploy deleted. I0125 05:12:43.361997 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-deploy, daemon set controller will avoid syncing I0125 05:12:43.362009 4678 disruption.go:355] deletePod called on pod "postgresql-master-1-deploy" I0125 05:12:43.362020 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:12:43.362026 4678 disruption.go:358] No matching pdb for pod "postgresql-master-1-deploy" I0125 05:12:43.362072 4678 pet_set.go:210] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.(*StatefulSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.deletePod)-fm. 
I0125 05:12:43.362088 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-deploy, StatefulSet controller will avoid syncing I0125 05:12:43.362844 4678 audit.go:125] 2017-01-25T05:12:43.362810366-05:00 AUDIT: id="ff5684a2-9005-417e-91ad-cbbf86428f45" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy/status" I0125 05:12:43.362960 4678 kubelet.go:1791] SyncLoop (REMOVE, "api"): "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:43.363018 4678 kubelet_pods.go:785] Killing unwanted pod "postgresql-master-1-deploy" I0125 05:12:43.363154 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:12:43.363408 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:43.363588 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:43.363637 4678 docker_manager.go:1536] Killing container "a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy" with 0 second grace period I0125 05:12:43.365299 4678 audit.go:45] 2017-01-25T05:12:43.365285824-05:00 AUDIT: id="ff5684a2-9005-417e-91ad-cbbf86428f45" response="409" I0125 05:12:43.365355 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-deploy/status: (2.779268ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] W0125 05:12:43.365641 4678 status_manager.go:451] Failed to update status for pod "_()": Operation cannot be fulfilled on pods "postgresql-master-1-deploy": StorageError: invalid object, Code: 4, Key: kubernetes.io/pods/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094, UID in object meta: I0125 05:12:43.367787 4678 docker_manager.go:1577] Container "a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy" exited after 4.125394ms I0125 05:12:43.368183 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1-deploy", UID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10959", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Killing' Killing container with docker id a14e6dfd9e5b: Need to kill pod. 
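The 409 on the PUT to .../pods/postgresql-master-1-deploy/status just above ("Precondition failed: UID in precondition ... UID in object meta:") is the kubelet's status write losing a race with the deploymentconfig-controller's DELETE: by the time the update lands, the object with that UID is gone. The usual client-side answer to this class of error is get-modify-update with a retry on conflict. A sketch of that pattern using a present-day client-go (not the 1.5-era vendored client in this log, whose signatures differ); the namespace and pod name are the ones from the log, and the annotation change is purely illustrative:

package main

import (
	"context"
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/util/retry"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	ns := "extended-test-postgresql-replication-1-34bbd-xd4g8"
	name := "postgresql-master-1-deploy"

	err = retry.RetryOnConflict(retry.DefaultRetry, func() error {
		// Re-read on every attempt so the update carries a fresh resourceVersion.
		pod, err := client.CoreV1().Pods(ns).Get(context.TODO(), name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		if pod.Annotations == nil {
			pod.Annotations = map[string]string{}
		}
		pod.Annotations["example/touched"] = "true" // illustrative change only
		_, err = client.CoreV1().Pods(ns).Update(context.TODO(), pod, metav1.UpdateOptions{})
		return err
	})
	if apierrors.IsNotFound(err) {
		// Matches the situation in the log: the pod was deleted out from under the writer.
		fmt.Println("pod is gone; nothing to update")
		return
	}
	if err != nil {
		panic(err)
	}
}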
I0125 05:12:43.368725 4678 audit.go:125] 2017-01-25T05:12:43.368689707-05:00 AUDIT: id="63d27e5d-b275-44d7-bc7d-39568b47c6a7" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:43.371370 4678 audit.go:45] 2017-01-25T05:12:43.371351179-05:00 AUDIT: id="63d27e5d-b275-44d7-bc7d-39568b47c6a7" response="201" I0125 05:12:43.371388 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:43.371427 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (2.990377ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:43.372878 4678 docker_manager.go:1536] Killing container "b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy" with 0 second grace period I0125 05:12:43.397354 4678 manager.go:955] Destroyed container: "/system.slice/docker-b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a.scope" (aliases: [k8s_POD.f321dce3_postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094_6c0f26e5 b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a], namespace: "docker") I0125 05:12:43.397403 4678 handler.go:325] Added event &{/system.slice/docker-b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a.scope 2017-01-25 05:12:43.397389494 -0500 EST containerDeletion {}} I0125 05:12:43.572920 4678 docker_manager.go:1577] Container "b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy" exited after 200.018682ms I0125 05:12:43.633038 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:43.633729 4678 audit.go:125] 2017-01-25T05:12:43.633692075-05:00 AUDIT: id="ca9baba1-76fa-49f5-837b-f5237199dba2" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:12:43.634806 4678 audit.go:45] 2017-01-25T05:12:43.634794449-05:00 AUDIT: id="ca9baba1-76fa-49f5-837b-f5237199dba2" response="200" I0125 05:12:43.634969 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (1.494447ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:43.635165 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:43.635250 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:43.635268 4678 helpers.go:78] Already ran container "deployment" of pod 
"postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)", do nothing I0125 05:12:43.635292 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[]} I0125 05:12:43.635318 4678 docker_manager.go:2093] Killing Infra Container for "postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094)" because all other containers are dead. I0125 05:12:43.637972 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:43.639375 4678 docker_manager.go:1536] Killing container "b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy" with 10 second grace period I0125 05:12:43.640881 4678 docker_manager.go:1577] Container "b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy" exited after 1.488397ms W0125 05:12:43.640903 4678 docker_manager.go:1583] No ref for pod '"b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-deploy"' I0125 05:12:43.684627 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:43.692124 4678 kubelet_volumes.go:104] Orphaned pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" found, but volumes are not cleaned up I0125 05:12:44.023469 4678 audit.go:125] 2017-01-25T05:12:44.023428189-05:00 AUDIT: id="26e3c2d6-0de4-44d6-8570-e2bceccd0ad9" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:44.024544 4678 audit.go:45] 2017-01-25T05:12:44.024529498-05:00 AUDIT: id="26e3c2d6-0de4-44d6-8570-e2bceccd0ad9" response="200" I0125 05:12:44.024913 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (3.105574ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:44.211901 4678 audit.go:125] 2017-01-25T05:12:44.211864229-05:00 AUDIT: id="81e36362-0498-438c-b323-22cfe0c4a62c" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:44.212922 4678 audit.go:45] 2017-01-25T05:12:44.212911723-05:00 AUDIT: id="81e36362-0498-438c-b323-22cfe0c4a62c" response="200" I0125 05:12:44.213282 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (2.876591ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40620] I0125 05:12:44.222106 4678 audit.go:125] 2017-01-25T05:12:44.222057927-05:00 AUDIT: 
id="0505e2bc-c9f3-46e6-9054-2c05a0a60808" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql-slave&container=postgresql-slave&stderr=true&stdout=true" I0125 05:12:44.222950 4678 audit.go:125] 2017-01-25T05:12:44.222927073-05:00 AUDIT: id="ec835e78-5849-44ba-84cb-756fa09567ea" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:44.223856 4678 audit.go:45] 2017-01-25T05:12:44.223843409-05:00 AUDIT: id="ec835e78-5849-44ba-84cb-756fa09567ea" response="200" I0125 05:12:44.223945 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (1.207941ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:44.224255 4678 admission.go:77] getting security context constraints for pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{extended-test-postgresql-replication-1-34bbd-xd4g8-user b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:44.224283 4678 admission.go:88] getting security context constraints for pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:44.224741 4678 audit.go:125] 2017-01-25T05:12:44.224716984-05:00 AUDIT: id="733f987e-a75b-4e61-b8c4-93b7960394f8" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:44.225574 4678 audit.go:45] 2017-01-25T05:12:44.225564268-05:00 AUDIT: id="733f987e-a75b-4e61-b8c4-93b7960394f8" response="200" I0125 05:12:44.225624 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.062718ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:44.225908 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:44.225917 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:44.225921 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:44.225950 4678 admission.go:149] validating pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) against providers restricted I0125 05:12:44.226009 4678 admission.go:116] pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) validated against provider restricted 
I0125 05:12:44.238101 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave"} I0125 05:12:44.297788 4678 server.go:744] POST /exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&error=1&output=1: (59.881743ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41742] I0125 05:12:44.299456 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/exec?command=bash&command=-c&command=psql+postgresql%3A%2F%2Fpostgres%40127.0.0.1+-x+-c+%22SELECT+1%3B%22&container=postgresql-slave&container=postgresql-slave&stderr=true&stdout=true: (78.908801ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40622] I0125 05:12:44.301059 4678 audit.go:125] 2017-01-25T05:12:44.301025673-05:00 AUDIT: id="b75e649b-5a07-4337-bbc7-604f1216bf28" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints" I0125 05:12:44.302353 4678 audit.go:45] 2017-01-25T05:12:44.302338565-05:00 AUDIT: id="b75e649b-5a07-4337-bbc7-604f1216bf28" response="200" I0125 05:12:44.302439 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints: (1.622345ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:12:44.305571 4678 audit.go:125] 2017-01-25T05:12:44.305534422-05:00 AUDIT: id="b5623964-de1c-4f19-aa90-b61323795d46" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:12:44.306493 4678 audit.go:45] 2017-01-25T05:12:44.306481191-05:00 AUDIT: id="b5623964-de1c-4f19-aa90-b61323795d46" response="200" I0125 05:12:44.306709 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (2.597318ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:44.309658 4678 audit.go:125] 2017-01-25T05:12:44.30962955-05:00 AUDIT: id="e732577b-68ba-4629-8d9e-2936b726dea7" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:44.310578 4678 audit.go:45] 2017-01-25T05:12:44.31056172-05:00 AUDIT: id="e732577b-68ba-4629-8d9e-2936b726dea7" response="200" I0125 05:12:44.310765 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: 
(2.422861ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:44.340005 4678 generic.go:145] GenericPLEG: b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a: running -> exited I0125 05:12:44.349115 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc425d05ce0 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/04ad5900 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc427b939e0 NetworkSettings:0xc430172300} I0125 05:12:44.354393 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc428bb4f20 Mounts:[] Config:0xc42468dc20 NetworkSettings:0xc4274e4d00} I0125 05:12:44.356701 4678 generic.go:342] PLEG: Write status for postgresql-master-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc435b5d5e0), (*container.ContainerStatus)(0xc435b5d6c0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:44.356762 4678 kubelet.go:1820] SyncLoop (PLEG): ignore irrelevant event: &pleg.PodLifecycleEvent{ID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a"} I0125 05:12:44.527981 4678 audit.go:125] 2017-01-25T05:12:44.527932291-05:00 AUDIT: id="896f0a50-0ec2-4bca-8137-468891ac6c5d" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:12:44.528971 4678 audit.go:45] 2017-01-25T05:12:44.528959004-05:00 AUDIT: id="896f0a50-0ec2-4bca-8137-468891ac6c5d" response="200" I0125 05:12:44.529182 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (2.829272ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40628] I0125 05:12:44.538234 4678 audit.go:125] 2017-01-25T05:12:44.538168335-05:00 AUDIT: id="4184eb31-0477-4835-9105-180ef8416b04" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/exec?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40postgresql-master%2Fuserdb+-x+-c+%22SELECT+1%3B%22&container=postgresql&container=postgresql&stderr=true&stdout=true" I0125 05:12:44.539057 4678 audit.go:125] 2017-01-25T05:12:44.539033224-05:00 AUDIT: id="0dbd8f37-8466-4256-b871-524391d9e11e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" 
asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:12:44.539983 4678 audit.go:45] 2017-01-25T05:12:44.539970123-05:00 AUDIT: id="0dbd8f37-8466-4256-b871-524391d9e11e" response="200" I0125 05:12:44.540085 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (1.227999ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:44.540375 4678 admission.go:77] getting security context constraints for pod postgresql-helper-1-cpv6d (generate: postgresql-helper-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{extended-test-postgresql-replication-1-34bbd-xd4g8-user b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:44.540404 4678 admission.go:88] getting security context constraints for pod postgresql-helper-1-cpv6d (generate: postgresql-helper-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:44.540816 4678 audit.go:125] 2017-01-25T05:12:44.540794062-05:00 AUDIT: id="f888fecf-a5ae-4762-a906-433bfc0f8dee" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:44.541619 4678 audit.go:45] 2017-01-25T05:12:44.54160931-05:00 AUDIT: id="f888fecf-a5ae-4762-a906-433bfc0f8dee" response="200" I0125 05:12:44.541669 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.037344ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:44.541898 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:44.541907 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:44.541911 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:44.541928 4678 admission.go:149] validating pod postgresql-helper-1-cpv6d (generate: postgresql-helper-1-) against providers restricted I0125 05:12:44.541979 4678 admission.go:116] pod postgresql-helper-1-cpv6d (generate: postgresql-helper-1-) validated against provider restricted I0125 05:12:44.553775 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d/postgresql"} I0125 05:12:44.603830 4678 logs.go:41] skydns: received DNS Request for 
"postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.8:42078" with type 28 I0125 05:12:44.603875 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:44.603955 4678 logs.go:41] skydns: received DNS Request for "postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local." from "172.17.0.8:42078" with type 1 I0125 05:12:44.603987 4678 serviceresolver.go:90] Answering query postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false I0125 05:12:44.603924 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{msg.Service{Host:"172.17.0.7", Port:0, Priority:10, Weight:10, Text:"", Mail:false, Ttl:0x1e, TargetStrip:0, Group:"", Key:"/skydns/local/cluster/svc/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master/8e89f024"}} I0125 05:12:44.604019 4678 serviceresolver.go:276] Answered postgresql-master.extended-test-postgresql-replication-1-34bbd-xd4g8.svc.cluster.local.:false with []msg.Service{msg.Service{Host:"172.17.0.7", Port:0, Priority:10, Weight:10, Text:"", Mail:false, Ttl:0x1e, TargetStrip:0, Group:"", Key:"/skydns/local/cluster/svc/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master/8e89f024"}} I0125 05:12:44.610716 4678 server.go:744] POST /exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d/postgresql?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40postgresql-master%2Fuserdb+-x+-c+%22SELECT+1%3B%22&error=1&output=1: (57.140076ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41750] I0125 05:12:44.611288 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/exec?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40postgresql-master%2Fuserdb+-x+-c+%22SELECT+1%3B%22&container=postgresql&container=postgresql&stderr=true&stdout=true: (74.52416ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40630] I0125 05:12:44.616135 4678 audit.go:125] 2017-01-25T05:12:44.616090909-05:00 AUDIT: id="646cedd6-6fa4-4599-b571-e16ebcb4296c" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:44.617142 4678 audit.go:45] 2017-01-25T05:12:44.617131625-05:00 AUDIT: id="646cedd6-6fa4-4599-b571-e16ebcb4296c" response="200" I0125 05:12:44.617351 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (2.740176ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:44.620476 4678 audit.go:125] 2017-01-25T05:12:44.620432821-05:00 AUDIT: id="60f973f8-09a1-4bdb-9b7f-559ab24ea50f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:44.621477 4678 audit.go:45] 
2017-01-25T05:12:44.621464947-05:00 AUDIT: id="60f973f8-09a1-4bdb-9b7f-559ab24ea50f" response="200" I0125 05:12:44.621666 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (2.58313ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:44.807475 4678 audit.go:125] 2017-01-25T05:12:44.807438568-05:00 AUDIT: id="cdf0fde6-7a90-48ce-aa34-2938b6adeb7f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:44.808378 4678 audit.go:45] 2017-01-25T05:12:44.808366339-05:00 AUDIT: id="cdf0fde6-7a90-48ce-aa34-2938b6adeb7f" response="200" I0125 05:12:44.808578 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (2.536826ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40636] I0125 05:12:44.817509 4678 audit.go:125] 2017-01-25T05:12:44.81745306-05:00 AUDIT: id="3a725573-d9bd-449a-854b-fc878d001a95" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/exec?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22CREATE+TABLE+table_01+%28col1+VARCHAR%2820%29%2C+col2+VARCHAR%2820%29%29%3B%22&container=postgresql-master&container=postgresql-master&stderr=true&stdout=true" I0125 05:12:44.818334 4678 audit.go:125] 2017-01-25T05:12:44.818299146-05:00 AUDIT: id="089fe707-f9dd-4d55-a83f-d2128c6dbb02" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:44.819180 4678 audit.go:45] 2017-01-25T05:12:44.81916748-05:00 AUDIT: id="089fe707-f9dd-4d55-a83f-d2128c6dbb02" response="200" I0125 05:12:44.819290 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (1.182924ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:44.819565 4678 admission.go:77] getting security context constraints for pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{extended-test-postgresql-replication-1-34bbd-xd4g8-user b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:44.819597 4678 admission.go:88] getting security context constraints for pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:44.820028 4678 audit.go:125] 2017-01-25T05:12:44.82000707-05:00 AUDIT: id="f7cb58e9-af41-41b0-bac6-64de37460a21" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" 
asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:44.820854 4678 audit.go:45] 2017-01-25T05:12:44.820844906-05:00 AUDIT: id="f7cb58e9-af41-41b0-bac6-64de37460a21" response="200" I0125 05:12:44.820912 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.05247ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:44.821117 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:44.821125 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:44.821129 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:44.821146 4678 admission.go:149] validating pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) against providers restricted I0125 05:12:44.821190 4678 admission.go:116] pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) validated against provider restricted I0125 05:12:44.833070 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/postgresql-master"} I0125 05:12:44.916617 4678 server.go:744] POST /exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/postgresql-master?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22CREATE+TABLE+table_01+%28col1+VARCHAR%2820%29%2C+col2+VARCHAR%2820%29%29%3B%22&error=1&output=1: (83.763831ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41758] E0125 05:12:44.917582 4678 proxy.go:193] Error proxying data from client to backend: write tcp 172.18.7.222:41758->172.18.7.222:10250: write: broken pipe I0125 05:12:44.917703 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/exec?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22CREATE+TABLE+table_01+%28col1+VARCHAR%2820%29%2C+col2+VARCHAR%2820%29%29%3B%22&container=postgresql-master&container=postgresql-master&stderr=true&stdout=true: (101.671377ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40638] I0125 05:12:44.923586 4678 audit.go:125] 2017-01-25T05:12:44.923536484-05:00 AUDIT: id="0ab51c21-c394-4782-955a-92916862555f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:44.924803 4678 audit.go:45] 2017-01-25T05:12:44.924787682-05:00 AUDIT: id="0ab51c21-c394-4782-955a-92916862555f" response="200" I0125 05:12:44.925079 
4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (3.378773ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:44.930473 4678 audit.go:125] 2017-01-25T05:12:44.930423743-05:00 AUDIT: id="f7121f44-fd55-4568-81c7-714cca70ccf5" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:44.931717 4678 audit.go:45] 2017-01-25T05:12:44.931701753-05:00 AUDIT: id="f7121f44-fd55-4568-81c7-714cca70ccf5" response="200" I0125 05:12:44.932007 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (3.257542ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:45.126222 4678 audit.go:125] 2017-01-25T05:12:45.126162796-05:00 AUDIT: id="08caf68b-addc-45b0-9027-f01ccadb731b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:45.127296 4678 audit.go:45] 2017-01-25T05:12:45.127279884-05:00 AUDIT: id="08caf68b-addc-45b0-9027-f01ccadb731b" response="200" I0125 05:12:45.127503 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (2.779855ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40644] I0125 05:12:45.136058 4678 audit.go:125] 2017-01-25T05:12:45.136011683-05:00 AUDIT: id="25ce0615-5a6c-4935-bcc0-2108a222ebb5" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/exec?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22INSERT+INTO+table_01+%28col1%2C+col2%29+VALUES+%28%27val1%27%2C+%27val2%27%29%3B%22&container=postgresql-master&container=postgresql-master&stderr=true&stdout=true" I0125 05:12:45.136895 4678 audit.go:125] 2017-01-25T05:12:45.13687008-05:00 AUDIT: id="fa076e31-d73a-470e-ac94-aef9092687b8" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:45.137802 4678 audit.go:45] 2017-01-25T05:12:45.137788699-05:00 AUDIT: id="fa076e31-d73a-470e-ac94-aef9092687b8" response="200" I0125 05:12:45.137892 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (1.19653ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:45.138207 4678 admission.go:77] getting security context constraints for pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{extended-test-postgresql-replication-1-34bbd-xd4g8-user 
b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:45.138247 4678 admission.go:88] getting security context constraints for pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:45.138751 4678 audit.go:125] 2017-01-25T05:12:45.138727627-05:00 AUDIT: id="8d16cb18-f073-444c-b9a9-446ac655377b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:45.139585 4678 audit.go:45] 2017-01-25T05:12:45.139575228-05:00 AUDIT: id="8d16cb18-f073-444c-b9a9-446ac655377b" response="200" I0125 05:12:45.139636 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.070143ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:45.139856 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:45.139865 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:45.139869 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:45.139884 4678 admission.go:149] validating pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) against providers restricted I0125 05:12:45.139927 4678 admission.go:116] pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) validated against provider restricted I0125 05:12:45.151749 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/postgresql-master"} I0125 05:12:45.207726 4678 server.go:744] POST /exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/postgresql-master?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22INSERT+INTO+table_01+%28col1%2C+col2%29+VALUES+%28%27val1%27%2C+%27val2%27%29%3B%22&error=1&output=1: (56.18642ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41766] I0125 05:12:45.209459 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/exec?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22INSERT+INTO+table_01+%28col1%2C+col2%29+VALUES+%28%27val1%27%2C+%27val2%27%29%3B%22&container=postgresql-master&container=postgresql-master&stderr=true&stdout=true: (74.92246ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40646] I0125 05:12:45.227610 
4678 audit.go:125] 2017-01-25T05:12:45.22757898-05:00 AUDIT: id="91ceaae3-f77b-4ee0-b039-1fcb45729576" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:12:45.228495 4678 audit.go:45] 2017-01-25T05:12:45.228484306-05:00 AUDIT: id="91ceaae3-f77b-4ee0-b039-1fcb45729576" response="200" I0125 05:12:45.228572 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.187966ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:45.472565 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/secret/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") from pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:12:45.472666 4678 util.go:340] Tearing down volume deployer-token-r7jj8 for pod b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:45.472711 4678 mount_linux.go:147] Unmounting /mnt/openshift-xfs-vol-dir/pods/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:45.493330 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (OuterVolumeSpecName: "deployer-token-r7jj8") pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "deployer-token-r7jj8". PluginName "kubernetes.io/secret", VolumeGidValue "" I0125 05:12:45.684630 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:45.692195 4678 kubelet_volumes.go:113] Orphaned pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" found, removing I0125 05:12:46.465122 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:12:46.465143 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:12:46.552182 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:12:47.101599 4678 helpers.go:101] Unable to get network stats from pid 10368: couldn't read network stats: failure opening /proc/10368/net/dev: open /proc/10368/net/dev: no such file or directory I0125 05:12:47.344431 4678 panics.go:76] GET /api/v1/watch/secrets?fieldSelector=type%3Dkubernetes.io%2Fservice-account-token&resourceVersion=9495&timeoutSeconds=556: (9m16.0011723s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:47.344745 4678 reflector.go:392] github.com/openshift/origin/pkg/serviceaccounts/controllers/create_dockercfg_secrets.go:221: Watch close - *api.Secret total 126 items received I0125 05:12:47.345438 4678 audit.go:125] 2017-01-25T05:12:47.345402718-05:00 AUDIT: id="0619c098-2979-405a-a4f8-a841d03fd81e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/secrets?fieldSelector=type%3Dkubernetes.io%2Fservice-account-token&resourceVersion=10931&timeoutSeconds=357" I0125 05:12:47.345946 4678 audit.go:45] 2017-01-25T05:12:47.345936736-05:00 AUDIT: id="0619c098-2979-405a-a4f8-a841d03fd81e" response="200" I0125 05:12:47.406963 4678 audit.go:125] 2017-01-25T05:12:47.406927591-05:00 AUDIT: id="8d86d87c-626d-49d7-b35a-b05c8f737cb9" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:12:47.407413 4678 audit.go:45] 2017-01-25T05:12:47.407397516-05:00 AUDIT: id="8d86d87c-626d-49d7-b35a-b05c8f737cb9" response="200" I0125 05:12:47.407748 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.016294ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:47.475743 4678 audit.go:125] 2017-01-25T05:12:47.475710432-05:00 AUDIT: id="b0052e90-f4d2-4e24-8aeb-f6baed7d4a44" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:12:47.478053 4678 audit.go:45] 2017-01-25T05:12:47.478033783-05:00 AUDIT: id="b0052e90-f4d2-4e24-8aeb-f6baed7d4a44" response="200" I0125 05:12:47.478374 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.856952ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:47.479268 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:12:47.514357 4678 panics.go:76] GET /api/v1/watch/replicationcontrollers?resourceVersion=9779&timeoutSeconds=596: (9m56.003094267s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:disruption-controller] 172.18.7.222:50846] I0125 05:12:47.514631 4678 reflector.go:392] pkg/controller/disruption/disruption.go:283: Watch close - *api.ReplicationController total 113 items received I0125 05:12:47.516977 4678 audit.go:125] 2017-01-25T05:12:47.516934788-05:00 AUDIT: id="1dfa9db2-fc24-4a51-825b-83076d1da8b0" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:disruption-controller" as="" asgroups="" namespace="" uri="/api/v1/watch/replicationcontrollers?resourceVersion=11086&timeoutSeconds=425" I0125 05:12:47.517397 4678 audit.go:45] 2017-01-25T05:12:47.51738709-05:00 AUDIT: id="1dfa9db2-fc24-4a51-825b-83076d1da8b0" response="200" I0125 05:12:47.684645 
4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:48.345663 4678 audit.go:125] 2017-01-25T05:12:48.345628787-05:00 AUDIT: id="d1c19cfb-3106-46ef-8064-706fc7dc1caf" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:48.346069 4678 audit.go:45] 2017-01-25T05:12:48.346059312-05:00 AUDIT: id="d1c19cfb-3106-46ef-8064-706fc7dc1caf" response="200" I0125 05:12:48.346389 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (971.344µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:48.346685 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:12:49.131721 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:12:49.131783 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:12:49.131802 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:49.131823 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:49.131835 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:12:49.131841 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:49.131848 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:49.131900 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:12:49.131938 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:49.131950 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:12:49.131960 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:12:49.131968 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:12:49.131974 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:49.131980 4678 pv_controller.go:379] synchronizing 
PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:49.131990 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:12:49.132008 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:49.132020 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:12:49.132025 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:12:49.132026 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:12:49.132035 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:12:49.132038 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:12:49.132077 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:12:49.132087 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:12:49.132108 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:12:49.132106 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:12:49.132127 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:12:49.192312 4678 audit.go:125] 2017-01-25T05:12:49.192269394-05:00 AUDIT: id="f34c95cf-d638-4248-8234-b33dc5da3d3a" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:12:49.193205 4678 audit.go:45] 2017-01-25T05:12:49.193194524-05:00 AUDIT: id="f34c95cf-d638-4248-8234-b33dc5da3d3a" response="200" I0125 05:12:49.193302 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.878516ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:49.193549 4678 controller.go:106] Found 0 cronjobs I0125 
05:12:49.195379 4678 audit.go:125] 2017-01-25T05:12:49.195360152-05:00 AUDIT: id="a0c67266-53f4-4677-b354-aaaac2da08c2" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:12:49.196190 4678 audit.go:45] 2017-01-25T05:12:49.196179617-05:00 AUDIT: id="a0c67266-53f4-4677-b354-aaaac2da08c2" response="200" I0125 05:12:49.196241 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.459626ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:49.196449 4678 controller.go:114] Found 0 jobs I0125 05:12:49.196458 4678 controller.go:117] Found 0 groups I0125 05:12:49.260987 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:49.261016 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:49.261702 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:49.261719 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:49.263152 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc438a73da0 -1 [] true false map[] 0xc4278dd590 } I0125 05:12:49.263166 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc4394bf280 -1 [] true false map[] 0xc4267de3c0 } I0125 05:12:49.263199 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:49.263199 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:49.684595 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:50.213547 4678 audit.go:125] 2017-01-25T05:12:50.213506919-05:00 AUDIT: id="2c5db2f2-5eda-4323-9e3b-e65d4b9cbc66" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:50.214566 4678 audit.go:45] 2017-01-25T05:12:50.214552974-05:00 AUDIT: id="2c5db2f2-5eda-4323-9e3b-e65d4b9cbc66" response="200" I0125 05:12:50.214761 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (2.84194ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:50.217913 4678 audit.go:125] 2017-01-25T05:12:50.217876979-05:00 AUDIT: id="c6a91511-d68a-4b56-973c-310fc6329629" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:50.218849 4678 audit.go:45] 2017-01-25T05:12:50.218838322-05:00 AUDIT: id="c6a91511-d68a-4b56-973c-310fc6329629" response="200" I0125 05:12:50.219022 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (2.401286ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 
05:12:50.414262 4678 audit.go:125] 2017-01-25T05:12:50.414220149-05:00 AUDIT: id="ce45a54b-3b2a-45d9-a14b-365732fc2f28" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:50.415307 4678 audit.go:45] 2017-01-25T05:12:50.415273721-05:00 AUDIT: id="ce45a54b-3b2a-45d9-a14b-365732fc2f28" response="200" I0125 05:12:50.415512 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (2.857371ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40660] I0125 05:12:50.424239 4678 audit.go:125] 2017-01-25T05:12:50.424184057-05:00 AUDIT: id="51ac87fb-7649-42ab-85d8-29cf9b895a34" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/exec?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22SELECT+%2A+FROM+table_01%3B%22&container=postgresql-master&container=postgresql-master&stderr=true&stdout=true" I0125 05:12:50.425065 4678 audit.go:125] 2017-01-25T05:12:50.425042161-05:00 AUDIT: id="4502dc4c-ad69-4e7b-9ad7-842a4d8e163b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:50.425941 4678 audit.go:45] 2017-01-25T05:12:50.425928944-05:00 AUDIT: id="4502dc4c-ad69-4e7b-9ad7-842a4d8e163b" response="200" I0125 05:12:50.426052 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (1.18175ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:50.426379 4678 admission.go:77] getting security context constraints for pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{extended-test-postgresql-replication-1-34bbd-xd4g8-user b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:50.426419 4678 admission.go:88] getting security context constraints for pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:50.426909 4678 audit.go:125] 2017-01-25T05:12:50.426885602-05:00 AUDIT: id="6ab87e9f-fc74-4d31-817e-ede91b058362" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:50.427744 4678 audit.go:45] 2017-01-25T05:12:50.427734395-05:00 AUDIT: id="6ab87e9f-fc74-4d31-817e-ede91b058362" response="200" I0125 05:12:50.427793 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.077154ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:50.428023 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:50.428032 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:50.428042 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:50.428056 4678 admission.go:149] validating pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) against providers restricted I0125 05:12:50.428112 4678 admission.go:116] pod postgresql-master-1-6jfgj (generate: postgresql-master-1-) validated against provider restricted I0125 05:12:50.439923 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/postgresql-master"} I0125 05:12:50.495423 4678 server.go:744] POST /exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj/postgresql-master?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22SELECT+%2A+FROM+table_01%3B%22&error=1&output=1: (55.692098ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41782] I0125 05:12:50.495999 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/exec?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22SELECT+%2A+FROM+table_01%3B%22&container=postgresql-master&container=postgresql-master&stderr=true&stdout=true: (73.302103ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40662] I0125 05:12:50.608289 4678 panics.go:76] GET /api/v1/watch/namespaces?resourceVersion=10150&timeoutSeconds=438: (7m18.00115348s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:50.608521 4678 reflector.go:392] github.com/openshift/origin/pkg/project/controller/factory.go:36: Watch close - *api.Namespace total 22 items received I0125 05:12:50.609137 4678 audit.go:125] 2017-01-25T05:12:50.609101097-05:00 AUDIT: id="849e7f93-6377-4b47-be64-9cc1dfa0ea07" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/namespaces?resourceVersion=10903&timeoutSeconds=441" I0125 05:12:50.609576 4678 audit.go:45] 2017-01-25T05:12:50.609566261-05:00 AUDIT: id="849e7f93-6377-4b47-be64-9cc1dfa0ea07" response="200" I0125 05:12:50.932482 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-slave centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-slave] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_SERVICE_NAME postgresql-master } {POSTGRESQL_MASTER_USER master } 
{POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc42cb02330 0xc42cb02360 /dev/termination-log IfNotPresent 0xc42cb02390 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:12:51.004847 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:12:51.004870 4678 prober.go:113] Readiness probe for "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-slave" succeeded I0125 05:12:51.005564 4678 audit.go:125] 2017-01-25T05:12:51.005531092-05:00 AUDIT: id="ed1630b3-c8d6-4fd4-bccd-cffcde3add0a" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:51.006770 4678 audit.go:45] 2017-01-25T05:12:51.006759876-05:00 AUDIT: id="ed1630b3-c8d6-4fd4-bccd-cffcde3add0a" response="200" I0125 05:12:51.006865 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (1.541682ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:51.007714 4678 audit.go:125] 2017-01-25T05:12:51.007689237-05:00 AUDIT: id="46e8d898-4e03-4f7e-b54f-fe796bbd5aa2" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/status" I0125 05:12:51.009276 4678 audit.go:45] 2017-01-25T05:12:51.009261707-05:00 AUDIT: id="46e8d898-4e03-4f7e-b54f-fe796bbd5aa2" response="200" I0125 05:12:51.009352 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/status: (1.830905ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:51.009760 4678 config.go:281] Setting pods for source api I0125 05:12:51.010647 4678 replication_controller.go:378] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11076 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc427f037e8 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11093 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc424399428 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:51.010744 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:12:31.005970671 -0500 EST, labels map[deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1]. 
I0125 05:12:51.010862 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc in state Running, deletion time 2017-01-25 05:12:31.005970671 -0500 EST I0125 05:12:51.010885 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (50.431µs) I0125 05:12:51.010915 4678 replica_set.go:320] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11076 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc427f037e8 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11093 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc424399428 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:12:51.011218 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:51.010986 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:12:31.005970671 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-slave-1-qt1rc", GenerateName:"postgresql-slave-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11093", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935920, nsec:921802332, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc4394eb8e0), DeletionGracePeriodSeconds:(*int64)(0xc424399428), Labels:map[string]string{"deploymentconfig":"postgresql-slave", "name":"postgresql-slave", "app":"pg-replica-example", "deployment":"postgresql-slave-1"}, Annotations:map[string]string{"openshift.io/deployment-config.latest-version":"1", "openshift.io/deployment-config.name":"postgresql-slave", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-slave-1\",\"uid\":\"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"10998\"}}\n", "openshift.io/deployment.name":"postgresql-slave-1", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc424399c30), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42e604f90), 
NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-slave", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-slave"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_SERVICE_NAME", Value:"postgresql-master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc42e605050), ReadinessProbe:(*api.Probe)(0xc42e605080), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42e6050b0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc4242a22f0), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc428942d80), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935971, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", 
LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.6", StartTime:(*unversioned.Time)(0xc4394ebbe0), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-slave", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(0xc4394ebc00), Terminated:(*api.ContainerStateTerminated)(nil)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:true, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48"}}}}. I0125 05:12:51.011301 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-qt1rc, ReplicaSet controller will avoid syncing I0125 05:12:51.011334 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-qt1rc, job controller will avoid syncing I0125 05:12:51.011349 4678 daemoncontroller.go:332] Pod postgresql-slave-1-qt1rc updated. I0125 05:12:51.011380 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-qt1rc, daemon set controller will avoid syncing I0125 05:12:51.011357 4678 status_manager.go:425] Status for pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935920 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935971 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935920 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.6 StartTime:0xc425ae2000 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-slave State:{Waiting: Running:0xc43819b780 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48}]} version:3 podName:postgresql-slave-1-qt1rc podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:51.011407 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-qt1rc" I0125 05:12:51.011414 4678 status_manager.go:435] Pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" is terminated, but some containers are still running I0125 05:12:51.011423 4678 disruption.go:389] No PodDisruptionBudgets found for pod 
postgresql-slave-1-qt1rc, PodDisruptionBudget controller will avoid syncing. I0125 05:12:51.011429 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-qt1rc" I0125 05:12:51.011465 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-qt1rc, StatefulSet controller will avoid syncing I0125 05:12:51.014139 4678 audit.go:125] 2017-01-25T05:12:51.014110234-05:00 AUDIT: id="d964887d-ab26-4b38-a2f0-fea4c09e1351" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:51.014981 4678 audit.go:45] 2017-01-25T05:12:51.014967915-05:00 AUDIT: id="d964887d-ab26-4b38-a2f0-fea4c09e1351" response="200" I0125 05:12:51.015036 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (2.478698ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:51.015283 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:12:51.017225 4678 audit.go:125] 2017-01-25T05:12:51.017181013-05:00 AUDIT: id="cca60e75-e4a2-4901-b15b-799998128ea7" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:51.017721 4678 audit.go:45] 2017-01-25T05:12:51.01770804-05:00 AUDIT: id="cca60e75-e4a2-4901-b15b-799998128ea7" response="200" I0125 05:12:51.017763 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (2.144679ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:51.017987 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(6.460612ms) I0125 05:12:51.684655 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:51.927327 4678 audit.go:125] 2017-01-25T05:12:51.927286323-05:00 AUDIT: id="285b3406-3158-4e84-b2f5-83302418264a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:12:51.928450 4678 audit.go:45] 2017-01-25T05:12:51.928438936-05:00 AUDIT: id="285b3406-3158-4e84-b2f5-83302418264a" response="200" I0125 05:12:51.928529 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.474669ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:52.110230 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD X5NgRSrwacHP }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc4269cff50 0xc4269cff80 /dev/termination-log IfNotPresent 0xc4269cffb0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:12:52.159770 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:12:52.159802 4678 prober.go:113] Readiness probe for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:12:52.904433 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:12:52.904840 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:12:52.905461 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:12:52.955574 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:12:52.955600 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:12:52.967199 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:52.967227 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:52.967879 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:12:52.967895 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:52.968640 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:12:52 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc43a7468e0 0 [] true false map[] 0xc435569a40 } I0125 05:12:52.968690 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:52.968779 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Date:[Wed, 25 Jan 2017 10:12:52 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache]] 0xc43a746a00 0 [] true false map[] 0xc4349b63c0 } I0125 05:12:52.968806 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:12:53.347525 4678 audit.go:125] 2017-01-25T05:12:53.347492602-05:00 AUDIT: id="50ad22e7-23ed-48f7-a93e-cf760a85c97c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:53.347951 4678 audit.go:45] 2017-01-25T05:12:53.34794248-05:00 AUDIT: id="50ad22e7-23ed-48f7-a93e-cf760a85c97c" response="200" I0125 05:12:53.348285 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (978.385µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:53.684609 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:54.007710 4678 gc_controller.go:175] GC'ing orphaned I0125 05:12:54.007739 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. 
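The exec readiness probes logged above all run the same one-row query inside the PostgreSQL containers; the "cannot set terminal process group" / "no job control" noise in the probe responses is only a side effect of the probe invoking /bin/sh with -i and no TTY attached. A minimal sketch of reproducing the same check by hand, assuming access to the same cluster and reusing the pod, container, and namespace names exactly as they appear in the log:

# Run the readiness probe command manually inside the master container (names copied from the log above).
oc exec postgresql-master-1-6jfgj -c postgresql-master \
  -n extended-test-postgresql-replication-1-34bbd-xd4g8 \
  -- /bin/sh -c 'psql -h 127.0.0.1 -U "$POSTGRESQL_USER" -q -d "$POSTGRESQL_DATABASE" -c "SELECT 1"'
# A healthy container prints a single row containing 1, matching the "Exec probe response" lines above.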
I0125 05:12:55.233246 4678 audit.go:125] 2017-01-25T05:12:55.233190498-05:00 AUDIT: id="53acb4ee-17cb-49aa-b1c1-708d90d86405" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:12:55.234232 4678 audit.go:45] 2017-01-25T05:12:55.234220457-05:00 AUDIT: id="53acb4ee-17cb-49aa-b1c1-708d90d86405" response="200" I0125 05:12:55.234323 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.336803ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:55.501156 4678 audit.go:125] 2017-01-25T05:12:55.501119404-05:00 AUDIT: id="a0525d02-3fa4-45b0-b8be-2afdc4c3710b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:55.502126 4678 audit.go:45] 2017-01-25T05:12:55.502114889-05:00 AUDIT: id="a0525d02-3fa4-45b0-b8be-2afdc4c3710b" response="200" I0125 05:12:55.502350 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (2.743143ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:55.505463 4678 audit.go:125] 2017-01-25T05:12:55.505434612-05:00 AUDIT: id="5e235d02-16dd-4196-9768-0825864114ae" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:55.506378 4678 audit.go:45] 2017-01-25T05:12:55.506367184-05:00 AUDIT: id="5e235d02-16dd-4196-9768-0825864114ae" response="200" I0125 05:12:55.506532 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (2.332291ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:55.684624 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:55.700316 4678 audit.go:125] 2017-01-25T05:12:55.700269919-05:00 AUDIT: id="bf45f60b-ee12-4b10-9ddf-3c599b0fd388" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:55.701346 4678 audit.go:45] 2017-01-25T05:12:55.701335132-05:00 AUDIT: id="bf45f60b-ee12-4b10-9ddf-3c599b0fd388" response="200" I0125 05:12:55.701544 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (2.676645ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40680] I0125 05:12:55.710576 4678 audit.go:125] 2017-01-25T05:12:55.710516176-05:00 AUDIT: id="89fef56c-1d49-4374-9e44-5f6b5e058e7c" ip="172.18.7.222" method="POST" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/exec?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22SELECT+%2A+FROM+table_01%3B%22&container=postgresql-slave&container=postgresql-slave&stderr=true&stdout=true" I0125 05:12:55.711384 4678 audit.go:125] 2017-01-25T05:12:55.711361472-05:00 AUDIT: id="664fd8b5-c7ba-43ec-ad4b-68c131ec0a4b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:55.712315 4678 audit.go:45] 2017-01-25T05:12:55.712303446-05:00 AUDIT: id="664fd8b5-c7ba-43ec-ad4b-68c131ec0a4b" response="200" I0125 05:12:55.712416 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (1.236203ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:55.712692 4678 admission.go:77] getting security context constraints for pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{extended-test-postgresql-replication-1-34bbd-xd4g8-user b2ca29b6-e2e6-11e6-a4b0-0e6a5cbf0094 [system:authenticated:oauth system:authenticated] map[authorization.openshift.io/scopes:[user:full]]} I0125 05:12:55.712719 4678 admission.go:88] getting security context constraints for pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:55.713191 4678 audit.go:125] 2017-01-25T05:12:55.713169555-05:00 AUDIT: id="74b0c9f2-0ad7-44fc-9cd8-efffb1854fdc" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:55.714042 4678 audit.go:45] 2017-01-25T05:12:55.714031402-05:00 AUDIT: id="74b0c9f2-0ad7-44fc-9cd8-efffb1854fdc" response="200" I0125 05:12:55.714103 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.107806ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:55.714322 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:55.714333 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:55.714338 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:55.714352 4678 admission.go:149] validating pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) against providers restricted I0125 05:12:55.714397 4678 admission.go:116] pod postgresql-slave-1-qt1rc (generate: postgresql-slave-1-) validated against provider restricted I0125 05:12:55.726128 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", 
"system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"create", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave"} I0125 05:12:55.785049 4678 server.go:744] POST /exec/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc/postgresql-slave?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22SELECT+%2A+FROM+table_01%3B%22&error=1&output=1: (59.135217ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41802] I0125 05:12:55.787605 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/exec?command=bash&command=-c&command=psql+postgres%3A%2F%2Fuser%3AIbyV1wgYrrMd%40127.0.0.1%2Fuserdb+-x+-c+%22SELECT+%2A+FROM+table_01%3B%22&container=postgresql-slave&container=postgresql-slave&stderr=true&stdout=true: (78.491983ms) hijacked [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40682] I0125 05:12:55.980656 4678 audit.go:125] 2017-01-25T05:12:55.980614604-05:00 AUDIT: id="b36e90f6-4853-4a20-9f19-3a5d6ebb3af5" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master" I0125 05:12:55.981723 4678 audit.go:45] 2017-01-25T05:12:55.981710464-05:00 AUDIT: id="b36e90f6-4853-4a20-9f19-3a5d6ebb3af5" response="200" I0125 05:12:55.981938 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master: (2.746803ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40688] I0125 05:12:55.987811 4678 audit.go:125] 2017-01-25T05:12:55.987767543-05:00 AUDIT: id="ce8b55fc-201e-4928-8552-5dd79e0ba44c" ip="172.18.7.222" method="PATCH" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master" I0125 05:12:55.990937 4678 audit.go:45] 2017-01-25T05:12:55.990922501-05:00 AUDIT: id="ce8b55fc-201e-4928-8552-5dd79e0ba44c" response="200" I0125 05:12:55.991101 4678 panics.go:76] PATCH /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master: (4.679484ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:40688] I0125 05:12:55.991502 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:12:55.991605 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:55.991593497 -0500 EST. 
I0125 05:12:55.992437 4678 audit.go:125] 2017-01-25T05:12:55.992412041-05:00 AUDIT: id="6a27f9e5-e06a-4d53-9ef2-07665de67049" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:12:55.994175 4678 audit.go:45] 2017-01-25T05:12:55.994160732-05:00 AUDIT: id="6a27f9e5-e06a-4d53-9ef2-07665de67049" response="200" I0125 05:12:55.994280 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (2.1208ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:55.994581 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 3) I0125 05:12:55.995314 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:12:55.995389 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:55.995381156 -0500 EST. I0125 05:12:55.995964 4678 audit.go:125] 2017-01-25T05:12:55.995931937-05:00 AUDIT: id="ef6ac688-5772-4ab1-a371-1178c049bdcb" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/instantiate" I0125 05:12:55.997638 4678 audit.go:125] 2017-01-25T05:12:55.997612366-05:00 AUDIT: id="ced37342-6d69-4829-a538-c80f3f06b805" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams/postgresql" I0125 05:12:55.998758 4678 audit.go:45] 2017-01-25T05:12:55.998743893-05:00 AUDIT: id="ced37342-6d69-4829-a538-c80f3f06b805" response="200" I0125 05:12:55.998972 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams/postgresql: (1.5707ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:55.999870 4678 audit.go:125] 2017-01-25T05:12:55.999838172-05:00 AUDIT: id="1869aa40-dd7b-4a43-a82c-062d9c373111" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:12:56.000959 4678 audit.go:45] 2017-01-25T05:12:56.000945089-05:00 AUDIT: id="1869aa40-dd7b-4a43-a82c-062d9c373111" response="200" I0125 05:12:56.001185 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (1.569994ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:56.001744 4678 rest.go:84] New deployment for "postgresql-master" caused by []api.DeploymentCause{api.DeploymentCause{Type:"ConfigChange", ImageTrigger:(*api.DeploymentCauseImageTrigger)(nil)}} I0125 05:12:56.003233 4678 audit.go:45] 2017-01-25T05:12:56.00321974-05:00 AUDIT: id="ef6ac688-5772-4ab1-a371-1178c049bdcb" response="201" I0125 05:12:56.003351 4678 panics.go:76] POST 
/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/instantiate: (7.656491ms) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:56.003579 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:12:56.004664 4678 audit.go:125] 2017-01-25T05:12:56.004635307-05:00 AUDIT: id="9ebb16dd-7b73-4f78-a3d6-f72214507867" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:12:56.005537 4678 audit.go:125] 2017-01-25T05:12:56.00550795-05:00 AUDIT: id="1e3ff39e-f7ce-4123-b971-528cede36809" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/resourcequotas" I0125 05:12:56.006374 4678 audit.go:45] 2017-01-25T05:12:56.006360618-05:00 AUDIT: id="1e3ff39e-f7ce-4123-b971-528cede36809" response="200" I0125 05:12:56.006432 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/resourcequotas: (1.085459ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:56.008291 4678 audit.go:45] 2017-01-25T05:12:56.008275258-05:00 AUDIT: id="9ebb16dd-7b73-4f78-a3d6-f72214507867" response="201" I0125 05:12:56.009101 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (4.644152ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:56.009501 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 either never recorded expectations, or the ttl expired. I0125 05:12:56.009542 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 0->0 (need 0), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 0->1 I0125 05:12:56.010418 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:56.010405812 -0500 EST. 
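The PATCH on deploymentconfigs/postgresql-master above triggers a ConfigChange rollout: the instantiate sub-resource returns 201, replication controller postgresql-master-2 is created, and a deployer pod follows in the entries below. A minimal sketch of inspecting that state from the command line, assuming the oc client seen in the log is still logged in to the same cluster and namespace:

# Check the deployment config, its replication controllers, and the deployer pod named in the log.
oc -n extended-test-postgresql-replication-1-34bbd-xd4g8 get dc postgresql-master
oc -n extended-test-postgresql-replication-1-34bbd-xd4g8 get rc postgresql-master-1 postgresql-master-2
oc -n extended-test-postgresql-replication-1-34bbd-xd4g8 logs pod/postgresql-master-2-deploy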
I0125 05:12:56.012022 4678 audit.go:125] 2017-01-25T05:12:56.011990474-05:00 AUDIT: id="f98804c4-3e1e-4e61-99fa-4bf05eed7521" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:12:56.013007 4678 audit.go:125] 2017-01-25T05:12:56.012985133-05:00 AUDIT: id="62186230-e23a-460a-9faa-7dca942d1dde" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:56.015947 4678 audit.go:45] 2017-01-25T05:12:56.015936577-05:00 AUDIT: id="f98804c4-3e1e-4e61-99fa-4bf05eed7521" response="200" I0125 05:12:56.016032 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (4.20923ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:56.017507 4678 audit.go:125] 2017-01-25T05:12:56.017469853-05:00 AUDIT: id="165e4603-5ba8-4938-8de0-bbc57c0072c8" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:12:56.018047 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 4) I0125 05:12:56.018078 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:12:56.018218 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:56.01818683 -0500 EST. 
I0125 05:12:56.018434 4678 audit.go:125] 2017-01-25T05:12:56.018402784-05:00 AUDIT: id="d37c79fe-2119-4661-9077-e54925245792" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:12:56.019917 4678 audit.go:125] 2017-01-25T05:12:56.019887071-05:00 AUDIT: id="2ada8ea5-0376-4a5f-90c9-30c7523846f4" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/limitranges" I0125 05:12:56.020340 4678 audit.go:45] 2017-01-25T05:12:56.020326085-05:00 AUDIT: id="165e4603-5ba8-4938-8de0-bbc57c0072c8" response="200" I0125 05:12:56.021097 4678 audit.go:45] 2017-01-25T05:12:56.021083231-05:00 AUDIT: id="62186230-e23a-460a-9faa-7dca942d1dde" response="201" I0125 05:12:56.021157 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (8.32471ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:56.021356 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (7.825672ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:12:56.021615 4678 audit.go:45] 2017-01-25T05:12:56.021601199-05:00 AUDIT: id="2ada8ea5-0376-4a5f-90c9-30c7523846f4" response="200" I0125 05:12:56.021623 4678 factory.go:154] Replication controller "postgresql-master-2" updated. I0125 05:12:56.021678 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/limitranges: (2.04461ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:56.021724 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:56.021713049 -0500 EST. I0125 05:12:56.022148 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (12.651375ms) I0125 05:12:56.022302 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 0->0 I0125 05:12:56.022332 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 either never recorded expectations, or the ttl expired. 
I0125 05:12:56.022365 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (39.639µs) I0125 05:12:56.022322 4678 admission.go:77] getting security context constraints for pod postgresql-master-2-deploy (generate: ) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{system:serviceaccount:openshift-infra:deploymentconfig-controller ca98311b-e2d9-11e6-a4b0-0e6a5cbf0094 [system:serviceaccounts system:serviceaccounts:openshift-infra system:authenticated] map[]} I0125 05:12:56.022400 4678 admission.go:88] getting security context constraints for pod postgresql-master-2-deploy (generate: ) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:12:56.022787 4678 audit.go:125] 2017-01-25T05:12:56.022763016-05:00 AUDIT: id="73b9fd26-a67c-42c9-b51e-62d0b328d7e0" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:12:56.023689 4678 audit.go:45] 2017-01-25T05:12:56.023675628-05:00 AUDIT: id="73b9fd26-a67c-42c9-b51e-62d0b328d7e0" response="200" I0125 05:12:56.023743 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.131574ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:56.023974 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:56.023983 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:56.023988 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:56.024001 4678 admission.go:149] validating pod postgresql-master-2-deploy (generate: ) against providers restricted I0125 05:12:56.024041 4678 admission.go:116] pod postgresql-master-2-deploy (generate: ) validated against provider restricted I0125 05:12:56.025278 4678 audit.go:45] 2017-01-25T05:12:56.025268187-05:00 AUDIT: id="d37c79fe-2119-4661-9077-e54925245792" response="201" I0125 05:12:56.025329 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (11.43583ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:12:56.025618 4678 controller.go:128] Created deployer pod postgresql-master-2-deploy for deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 I0125 05:12:56.026010 4678 factory.go:488] About to try and schedule pod postgresql-master-2-deploy I0125 05:12:56.026018 4678 scheduler.go:93] Attempting to schedule pod: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy I0125 05:12:56.026174 4678 replication_controller.go:255] No controllers found for pod postgresql-master-2-deploy, replication manager will avoid syncing I0125 05:12:56.026185 4678 replica_set.go:288] Pod postgresql-master-2-deploy created: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, 
ObjectMeta:api.ObjectMeta{Name:"postgresql-master-2-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy", UID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11102", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935976, nsec:24155702, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-master-2"}, Annotations:map[string]string{"openshift.io/deployment.name":"postgresql-master-2", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42c0d6990), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN 
CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-master-2", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42c0d6a50), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc436cc44d0), ActiveDeadlineSeconds:(*int64)(0xc436cc44d8), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"", SecurityContext:(*api.PodSecurityContext)(0xc4369de2c0), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Pending", Conditions:[]api.PodCondition(nil), Message:"", Reason:"", HostIP:"", PodIP:"", StartTime:(*unversioned.Time)(nil), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus(nil)}}. I0125 05:12:56.026519 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-deploy, ReplicaSet controller will avoid syncing I0125 05:12:56.026570 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-deploy, job controller will avoid syncing I0125 05:12:56.026585 4678 daemoncontroller.go:309] Pod postgresql-master-2-deploy added. I0125 05:12:56.026634 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-deploy, daemon set controller will avoid syncing I0125 05:12:56.026647 4678 disruption.go:314] addPod called on pod "postgresql-master-2-deploy" I0125 05:12:56.026682 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-deploy, PodDisruptionBudget controller will avoid syncing. 
I0125 05:12:56.026689 4678 disruption.go:317] No matching pdb for pod "postgresql-master-2-deploy" I0125 05:12:56.026732 4678 pet_set.go:160] Pod postgresql-master-2-deploy created, labels: map[openshift.io/deployer-pod-for.name:postgresql-master-2] I0125 05:12:56.026771 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-deploy, StatefulSet controller will avoid syncing I0125 05:12:56.026924 4678 factory.go:648] Attempting to bind postgresql-master-2-deploy to 172.18.7.222 I0125 05:12:56.027597 4678 audit.go:125] 2017-01-25T05:12:56.027563099-05:00 AUDIT: id="4d48a56e-daaa-48e1-9738-d50b4cf016d2" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings" I0125 05:12:56.028485 4678 audit.go:125] 2017-01-25T05:12:56.028447568-05:00 AUDIT: id="5b544697-7e97-4ffd-bb24-28af75e79460" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:12:56.029005 4678 audit.go:45] 2017-01-25T05:12:56.02899201-05:00 AUDIT: id="5b544697-7e97-4ffd-bb24-28af75e79460" response="409" I0125 05:12:56.029056 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (2.459291ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:12:56.029395 4678 audit.go:45] 2017-01-25T05:12:56.029382234-05:00 AUDIT: id="4d48a56e-daaa-48e1-9738-d50b4cf016d2" response="201" I0125 05:12:56.029401 4678 controller.go:155] Detected existing deployer pod postgresql-master-2-deploy for deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 I0125 05:12:56.029443 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings: (2.118895ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:56.030617 4678 replication_controller.go:378] Pod postgresql-master-2-deploy updated, objectMeta {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11102 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11103 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 
openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:56.030728 4678 replication_controller.go:255] No controllers found for pod postgresql-master-2-deploy, replication manager will avoid syncing I0125 05:12:56.030744 4678 replica_set.go:320] Pod postgresql-master-2-deploy updated, objectMeta {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11102 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11103 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:56.030827 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-deploy, ReplicaSet controller will avoid syncing I0125 05:12:56.030857 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-deploy, job controller will avoid syncing I0125 05:12:56.030873 4678 daemoncontroller.go:332] Pod postgresql-master-2-deploy updated. I0125 05:12:56.030899 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-deploy, daemon set controller will avoid syncing I0125 05:12:56.030915 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-deploy" I0125 05:12:56.030928 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-deploy, PodDisruptionBudget controller will avoid syncing. 
I0125 05:12:56.030934 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-deploy" I0125 05:12:56.030986 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-deploy, StatefulSet controller will avoid syncing I0125 05:12:56.031158 4678 config.go:281] Setting pods for source api I0125 05:12:56.031277 4678 audit.go:125] 2017-01-25T05:12:56.031246878-05:00 AUDIT: id="ad56793e-bf51-47e7-b10d-2d7893c65fcb" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:56.032368 4678 config.go:397] Receiving a new pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:56.032618 4678 kubelet.go:1781] SyncLoop (ADD, "api"): "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:56.032775 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:56.033402 4678 audit.go:125] 2017-01-25T05:12:56.033366978-05:00 AUDIT: id="ae7c3698-f503-4cf6-ba68-b16c6a7fb2aa" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:12:56.033497 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:56.034625 4678 audit.go:45] 2017-01-25T05:12:56.034610867-05:00 AUDIT: id="ad56793e-bf51-47e7-b10d-2d7893c65fcb" response="201" I0125 05:12:56.034679 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.675827ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:56.035079 4678 audit.go:125] 2017-01-25T05:12:56.035047667-05:00 AUDIT: id="cb58f6f4-f211-481d-9f47-878bafc3a812" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy" I0125 05:12:56.036034 4678 audit.go:45] 2017-01-25T05:12:56.036020556-05:00 AUDIT: id="cb58f6f4-f211-481d-9f47-878bafc3a812" response="200" I0125 05:12:56.036101 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy: (1.285855ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:56.036292 4678 audit.go:45] 2017-01-25T05:12:56.036278134-05:00 AUDIT: id="ae7c3698-f503-4cf6-ba68-b16c6a7fb2aa" response="200" I0125 05:12:56.037345 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (7.496586ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:12:56.037451 4678 audit.go:125] 2017-01-25T05:12:56.03741583-05:00 AUDIT: 
id="bd9d4297-1ef3-4b31-b7aa-68cbfce29e76" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy/status" I0125 05:12:56.037879 4678 factory.go:154] Replication controller "postgresql-master-2" updated. I0125 05:12:56.037999 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:56.037984659 -0500 EST. I0125 05:12:56.038399 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 0->0 I0125 05:12:56.038453 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 either never recorded expectations, or the ttl expired. I0125 05:12:56.038488 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (44.388µs) I0125 05:12:56.038829 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 status from New to Pending (scale: 0) I0125 05:12:56.039476 4678 audit.go:125] 2017-01-25T05:12:56.039442499-05:00 AUDIT: id="a0cf0b9b-91c2-4932-aed3-df844e24acc0" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:12:56.039587 4678 audit.go:45] 2017-01-25T05:12:56.039572251-05:00 AUDIT: id="bd9d4297-1ef3-4b31-b7aa-68cbfce29e76" response="200" I0125 05:12:56.039653 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy/status: (2.537683ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:56.040144 4678 status_manager.go:425] Status for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Pending Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935976 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935976 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [deployment]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935976 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc433575160 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting:0xc433575140 Running: Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID: ContainerID:}]} version:1 podName:postgresql-master-2-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:56.040343 4678 config.go:281] Setting pods for source api I0125 05:12:56.040788 4678 replication_controller.go:378] Pod postgresql-master-2-deploy updated, objectMeta 
{Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11103 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11106 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:56.040898 4678 replication_controller.go:255] No controllers found for pod postgresql-master-2-deploy, replication manager will avoid syncing I0125 05:12:56.040915 4678 replica_set.go:320] Pod postgresql-master-2-deploy updated, objectMeta {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11103 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11106 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:56.040978 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-deploy, ReplicaSet controller will avoid syncing I0125 05:12:56.041004 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-deploy, job controller will avoid syncing I0125 05:12:56.041021 4678 daemoncontroller.go:332] Pod postgresql-master-2-deploy updated. I0125 05:12:56.041042 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-deploy, daemon set controller will avoid syncing I0125 05:12:56.041062 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-deploy" I0125 05:12:56.041076 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-deploy, PodDisruptionBudget controller will avoid syncing. 
I0125 05:12:56.041082 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-deploy" I0125 05:12:56.041138 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-deploy, StatefulSet controller will avoid syncing I0125 05:12:56.041791 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:56.042183 4678 audit.go:45] 2017-01-25T05:12:56.04216865-05:00 AUDIT: id="a0cf0b9b-91c2-4932-aed3-df844e24acc0" response="200" I0125 05:12:56.042306 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (3.101979ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:56.042636 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 4) I0125 05:12:56.042683 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:12:56.042775 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:12:56.042764265 -0500 EST. I0125 05:12:56.226824 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/secret/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:12:56.327101 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/secret/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") to pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:12:56.327160 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:56.327441 4678 empty_dir.go:248] pod d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094: mounting tmpfs for volume wrapped_deployer-token-r7jj8 I0125 05:12:56.327452 4678 mount_linux.go:112] Mounting tmpfs /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 tmpfs [] with command: "mount" I0125 05:12:56.327458 4678 mount_linux.go:115] Mounting cmd (mount) with arguments ([-t tmpfs tmpfs /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8]) I0125 05:12:56.348778 4678 audit.go:125] 2017-01-25T05:12:56.348730589-05:00 AUDIT: id="6647e0c3-1c60-4f5d-9e21-a625475019ae" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:12:56.350119 4678 audit.go:45] 2017-01-25T05:12:56.350108079-05:00 AUDIT: id="6647e0c3-1c60-4f5d-9e21-a625475019ae" response="200" I0125 05:12:56.350314 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (1.874346ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:56.350512 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:12:56.350575 4678 atomic_writer.go:145] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy volume deployer-token-r7jj8: write required for target directory /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:12:56.350840 4678 atomic_writer.go:160] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy volume deployer-token-r7jj8: performed write of new data to ts data directory: /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8/..1981_25_01_05_12_56.237361708 I0125 05:12:56.351390 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094"). 
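The mount_linux.go and atomic_writer.go entries above spell out the two steps the kubelet takes for the deployer-token secret volume: back the volume directory with a tmpfs mount, then write the secret's keys into a fresh timestamped data directory so the update lands as a unit. The following is a rough sketch of that sequence, assuming a root shell; setupSecretVolume and its arguments are illustrative names, not the kubelet's actual API:

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"time"
)

// setupSecretVolume loosely mirrors what the log shows: mount a tmpfs at the
// volume path, then write each secret key into a timestamped data directory.
// (Illustrative sketch only; the mount call requires root.)
func setupSecretVolume(volumeDir string, data map[string][]byte) error {
	if err := os.MkdirAll(volumeDir, 0750); err != nil {
		return err
	}
	// Equivalent of: mount -t tmpfs tmpfs <volumeDir>
	if out, err := exec.Command("mount", "-t", "tmpfs", "tmpfs", volumeDir).CombinedOutput(); err != nil {
		return fmt.Errorf("mount tmpfs: %v: %s", err, out)
	}
	// New data goes into a timestamped directory, similar to the
	// "..1981_25_01_05_12_56.237361708" directory in the log above.
	tsDir := filepath.Join(volumeDir, ".."+time.Now().Format("2006_01_02_15_04_05"))
	if err := os.MkdirAll(tsDir, 0750); err != nil {
		return err
	}
	for name, value := range data {
		if err := os.WriteFile(filepath.Join(tsDir, name), value, 0640); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	err := setupSecretVolume("/tmp/deployer-token-example",
		map[string][]byte{"token": []byte("redacted"), "ca.crt": []byte("...")})
	fmt.Println("setup:", err)
}
```

The kubelet's atomic writer additionally flips a symlink to the new data directory, so a consumer in the pod sees either the old or the new set of files, never a mix; the sketch stops at the write step the log reports.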
I0125 05:12:56.431841 4678 container_gc.go:249] Removing container "a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f" name "deployment" I0125 05:12:56.564101 4678 panics.go:76] GET /oapi/v1/watch/imagestreams?resourceVersion=10086&timeoutSeconds=338: (5m38.005188457s) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:12:56.565304 4678 reflector.go:392] github.com/openshift/origin/pkg/build/controller/factory/factory.go:304: Watch close - *api.ImageStream total 5 items received I0125 05:12:56.566034 4678 audit.go:125] 2017-01-25T05:12:56.565992098-05:00 AUDIT: id="a91d4027-fb23-433f-bca0-bea99e4cfb3e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/watch/imagestreams?resourceVersion=10739&timeoutSeconds=336" I0125 05:12:56.566654 4678 audit.go:45] 2017-01-25T05:12:56.566639831-05:00 AUDIT: id="a91d4027-fb23-433f-bca0-bea99e4cfb3e" response="200" I0125 05:12:56.583118 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:12:56.583142 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:12:56.636333 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:56.637128 4678 audit.go:125] 2017-01-25T05:12:56.637079174-05:00 AUDIT: id="3509cf09-7a7d-4a34-99a6-c5a3a539c798" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:12:56.638505 4678 audit.go:45] 2017-01-25T05:12:56.638489249-05:00 AUDIT: id="3509cf09-7a7d-4a34-99a6-c5a3a539c798" response="200" I0125 05:12:56.638704 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (1.911349ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:56.638953 4678 docker_manager.go:1947] Need to restart pod infra container for "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" because it is not found I0125 05:12:56.639021 4678 docker_manager.go:1992] Container {Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG 
SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-master-2 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc435b5af30 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it. I0125 05:12:56.639048 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:true InfraChanged:false InfraContainerId: InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[0:Container {Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-master-2 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc435b5af30 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it.] 
ContainersToKeep:map[]} I0125 05:12:56.639079 4678 docker_manager.go:2095] Killing Infra Container for "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)", will start new one I0125 05:12:56.639106 4678 docker_manager.go:2153] Creating pod infra container for "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:56.768529 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:12:57.002754 4678 audit.go:125] 2017-01-25T05:12:57.002700014-05:00 AUDIT: id="6c095d94-39ca-419d-a3b6-c46556f78189" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:57.003948 4678 audit.go:45] 2017-01-25T05:12:57.003932837-05:00 AUDIT: id="6c095d94-39ca-419d-a3b6-c46556f78189" response="200" I0125 05:12:57.004221 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (3.395419ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:57.114857 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy/POD podIP: "" creating hosts mount: false I0125 05:12:57.115181 4678 container_gc.go:249] Removing container "c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b" name "POD" I0125 05:12:57.206615 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy/POD: setting entrypoint "[]" and command "[]" I0125 05:12:57.430165 4678 generic.go:145] GenericPLEG: b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b: exited -> unknown I0125 05:12:57.430208 4678 generic.go:145] GenericPLEG: b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f: exited -> non-existent I0125 05:12:57.479879 4678 audit.go:125] 2017-01-25T05:12:57.479844063-05:00 AUDIT: id="e9c003a7-5188-4652-bfa6-6d31230ab0c2" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:12:57.480308 4678 audit.go:45] 2017-01-25T05:12:57.480297449-05:00 AUDIT: id="e9c003a7-5188-4652-bfa6-6d31230ab0c2" response="200" I0125 05:12:57.480618 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (985.594µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:57.508966 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc429373a20 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/ce4eccb1 Destination:/dev/termination-log Driver: Mode:Z RW:true 
Propagation:rprivate}] Config:0xc4344a9d40 NetworkSettings:0xc435183400} I0125 05:12:57.509232 4678 container_gc.go:249] Removing container "b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a" name "POD" I0125 05:12:57.563491 4678 audit.go:125] 2017-01-25T05:12:57.563448376-05:00 AUDIT: id="d652a92f-b732-4e2e-a30c-0ec2ac3f5c49" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:12:57.566930 4678 audit.go:45] 2017-01-25T05:12:57.566906659-05:00 AUDIT: id="d652a92f-b732-4e2e-a30c-0ec2ac3f5c49" response="200" I0125 05:12:57.567885 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (4.738231ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:57.568533 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" E0125 05:12:57.575019 4678 docker_manager.go:2623] Unable to inspect container "c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b": no such container: "c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b" I0125 05:12:57.575060 4678 generic.go:342] PLEG: Write status for postgresql-helper-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-helper-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc435bbf960), (*container.ContainerStatus)(0xc438094a80)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:57.631573 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42dcceb00 Mounts:[] Config:0xc431c5db00 NetworkSettings:0xc430f2e200} I0125 05:12:57.685168 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:57.990920 4678 container_gc.go:249] Removing container "57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d" name "deployment" I0125 05:12:58.003477 4678 audit.go:125] 2017-01-25T05:12:58.003403165-05:00 AUDIT: id="d0a87015-52ad-4d46-b754-f72e70850797" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:58.005335 4678 audit.go:45] 2017-01-25T05:12:58.005315521-05:00 AUDIT: id="d0a87015-52ad-4d46-b754-f72e70850797" response="200" I0125 05:12:58.005599 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (4.029127ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:58.349790 4678 audit.go:125] 2017-01-25T05:12:58.349754731-05:00 AUDIT: id="e65eb62b-feaf-40de-a5ef-43671472a71b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:12:58.350219 4678 audit.go:45] 2017-01-25T05:12:58.350190096-05:00 AUDIT: id="e65eb62b-feaf-40de-a5ef-43671472a71b" response="200" I0125 05:12:58.350572 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.032464ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:58.350835 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
I0125 05:12:58.435785 4678 panics.go:76] GET /api/v1/watch/nodes?resourceVersion=9811&timeoutSeconds=583: (9m43.000906993s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:12:58.436043 4678 reflector.go:392] github.com/openshift/origin/vendor/k8s.io/kubernetes/plugin/pkg/scheduler/factory/factory.go:463: Watch close - *api.Node total 58 items received I0125 05:12:58.436782 4678 audit.go:125] 2017-01-25T05:12:58.436739526-05:00 AUDIT: id="d8cd0687-0b9e-45a3-848d-3e6fdd16fc67" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/nodes?resourceVersion=11108&timeoutSeconds=310" I0125 05:12:58.437311 4678 audit.go:45] 2017-01-25T05:12:58.437298083-05:00 AUDIT: id="d8cd0687-0b9e-45a3-848d-3e6fdd16fc67" response="200" I0125 05:12:58.680320 4678 kubelet.go:1138] Container garbage collection succeeded I0125 05:12:58.730109 4678 generic.go:342] PLEG: Write status for postgresql-master-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42b84c1c0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:58.936164 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d.scope" E0125 05:12:58.941684 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" container "POD": symlink /var/log/containers/postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_POD-2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d.log: no such file or directory I0125 05:12:58.941725 4678 docker_manager.go:1833] DNS ResolvConfPath exists: /var/lib/docker/containers/2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d/resolv.conf. 
Will attempt to add ndots option: options ndots:5 I0125 05:12:58.941777 4678 docker_manager.go:2167] Calling network plugin kubernetes.io/no-op to setup pod for postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:12:58.987240 4678 hairpin.go:110] Enabling hairpin on interface veth8103d18 I0125 05:12:58.987561 4678 docker_manager.go:2208] Determined pod ip after infra change: "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)": "172.17.0.4" I0125 05:12:58.987588 4678 docker_manager.go:2293] Creating container &{Name:deployment Image:openshift/origin-deployer:86a9783 Command:[] Args:[] WorkingDir: Ports:[] Env:[{Name:KUBERNETES_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:OPENSHIFT_MASTER Value:https://172.18.7.222:8443 ValueFrom:} {Name:BEARER_TOKEN_FILE Value:/var/run/secrets/kubernetes.io/serviceaccount/token ValueFrom:} {Name:OPENSHIFT_CA_DATA Value:-----BEGIN CERTIFICATE----- MIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu c2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0 MDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH 6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp MBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ xPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq W/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4 LsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t AgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+ TBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+ OrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW JDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ 4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5 WymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M -----END CERTIFICATE----- ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAME Value:postgresql-master-2 ValueFrom:} {Name:OPENSHIFT_DEPLOYMENT_NAMESPACE Value:extended-test-postgresql-replication-1-34bbd-xd4g8 ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:deployer-token-r7jj8 ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe: ReadinessProbe: Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc435b5af30 Stdin:false StdinOnce:false TTY:false} in pod postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:12:58.990850 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy/deployment podIP: "172.17.0.4" creating hosts mount: true I0125 05:12:58.992040 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-2-deploy", UID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11103", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Pulled' Container image "openshift/origin-deployer:86a9783" already present on machine I0125 05:12:58.992734 4678 audit.go:125] 2017-01-25T05:12:58.992690219-05:00 AUDIT: id="9d203ddd-28ee-44e5-bc7c-657ecbe0ccbf" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" 
asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:58.995889 4678 audit.go:45] 2017-01-25T05:12:58.995869926-05:00 AUDIT: id="9d203ddd-28ee-44e5-bc7c-657ecbe0ccbf" response="201" I0125 05:12:58.995963 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.582936ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:59.003291 4678 audit.go:125] 2017-01-25T05:12:59.003243842-05:00 AUDIT: id="08d77072-a878-434f-a185-6f2d195f025b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:12:59.005089 4678 audit.go:45] 2017-01-25T05:12:59.005072012-05:00 AUDIT: id="08d77072-a878-434f-a185-6f2d195f025b" response="200" I0125 05:12:59.005375 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (3.904987ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:12:59.113226 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy/deployment: setting entrypoint "[]" and command "[]" I0125 05:12:59.138118 4678 manager.go:898] Added container: "/system.slice/docker-2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d.scope" (aliases: [k8s_POD.f321dce3_postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094_f5d33a89 2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d], namespace: "docker") I0125 05:12:59.138363 4678 handler.go:325] Added event &{/system.slice/docker-2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d.scope 2017-01-25 05:12:58.810976334 -0500 EST containerCreation {}} I0125 05:12:59.138453 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-d71573a5\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-deployer\x2dtoken\x2dr7jj8.mount: invalid container name I0125 05:12:59.138465 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-d71573a5\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:12:59.138482 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-d71573a5\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount", but ignoring. 
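The factory.go lines above show cAdvisor-style factory selection at work: the docker factory rejects the secret-volume mount unit, the systemd factory recognises it but deliberately ignores it, and the docker ".scope" cgroup is accepted and starts housekeeping. Below is a rough sketch of that selection loop; ContainerFactory and pickFactory are hypothetical stand-ins, not cAdvisor's real interfaces:

```go
package main

import (
	"fmt"
	"strings"
)

// ContainerFactory is a hypothetical stand-in for a monitoring factory:
// CanHandle says whether the cgroup belongs to this factory, Accept says
// whether the container should actually be watched.
type ContainerFactory struct {
	Name      string
	CanHandle func(cgroup string) bool
	Accept    func(cgroup string) bool
}

// pickFactory walks the registered factories in order and returns the first
// one that can handle the cgroup, plus whether the container should be watched.
func pickFactory(factories []ContainerFactory, cgroup string) (string, bool) {
	for _, f := range factories {
		if !f.CanHandle(cgroup) {
			fmt.Printf("Factory %q was unable to handle container %q\n", f.Name, cgroup)
			continue
		}
		if !f.Accept(cgroup) {
			fmt.Printf("Factory %q can handle container %q, but ignoring.\n", f.Name, cgroup)
			return f.Name, false
		}
		return f.Name, true
	}
	return "", false
}

func main() {
	factories := []ContainerFactory{
		{
			Name:      "docker",
			CanHandle: func(c string) bool { return strings.Contains(c, "docker-") },
			Accept:    func(c string) bool { return true },
		},
		{
			Name:      "systemd",
			CanHandle: func(c string) bool { return strings.HasSuffix(c, ".mount") },
			Accept:    func(c string) bool { return false }, // mount units are ignored
		},
	}
	name, watch := pickFactory(factories, "/system.slice/docker-2e93a8e8d48d.scope")
	fmt.Println(name, watch) // docker true -> container added and housekeeping started
}
```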
I0125 05:12:59.138498 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-d71573a5\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-deployer\\x2dtoken\\x2dr7jj8.mount" I0125 05:12:59.138529 4678 container.go:407] Start housekeeping for container "/system.slice/docker-2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d.scope" I0125 05:12:59.201618 4678 audit.go:125] 2017-01-25T05:12:59.201579391-05:00 AUDIT: id="393934a8-7818-405c-bd2b-61f6e34c0af7" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:12:59.202684 4678 audit.go:45] 2017-01-25T05:12:59.202669953-05:00 AUDIT: id="393934a8-7818-405c-bd2b-61f6e34c0af7" response="200" I0125 05:12:59.202782 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (4.014686ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:59.203089 4678 controller.go:106] Found 0 cronjobs I0125 05:12:59.207298 4678 audit.go:125] 2017-01-25T05:12:59.207260306-05:00 AUDIT: id="55286c9b-8214-4c7a-8bc5-6b0776ef330c" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:12:59.208510 4678 audit.go:45] 2017-01-25T05:12:59.208494453-05:00 AUDIT: id="55286c9b-8214-4c7a-8bc5-6b0776ef330c" response="200" I0125 05:12:59.208607 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (5.243326ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:12:59.208886 4678 controller.go:114] Found 0 jobs I0125 05:12:59.208896 4678 controller.go:117] Found 0 groups I0125 05:12:59.262486 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:59.262523 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:59.263116 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:12:59.263132 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:12:59.264274 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Connection:[close] Content-Type:[text/html] Cache-Control:[no-cache]] 0xc43533d440 -1 [] true false map[] 0xc436d70d20 } I0125 05:12:59.264330 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:59.264415 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc43533d520 -1 [] true false map[] 0xc436d70f00 } I0125 05:12:59.264443 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:12:59.452746 4678 docker_manager.go:1577] Container "1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48 postgresql-slave extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc" exited after 30.266657014s I0125 05:12:59.453365 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11002", 
FieldPath:"spec.containers{postgresql-slave}"}): type: 'Normal' reason: 'Killing' Killing container with docker id 1122e1bd8a66: Need to kill pod. I0125 05:12:59.456893 4678 audit.go:125] 2017-01-25T05:12:59.456854725-05:00 AUDIT: id="5a9ee592-0cd7-4f24-966c-73e0fa789813" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:59.459339 4678 audit.go:45] 2017-01-25T05:12:59.459326747-05:00 AUDIT: id="5a9ee592-0cd7-4f24-966c-73e0fa789813" response="201" I0125 05:12:59.459392 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (2.780885ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:59.514085 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:12:59.515739 4678 docker_manager.go:1536] Killing container "68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc" with 30 second grace period I0125 05:12:59.541020 4678 manager.go:955] Destroyed container: "/system.slice/docker-68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2.scope" (aliases: [k8s_POD.73b4fecf_postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8_b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094_858b7658 68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2], namespace: "docker") I0125 05:12:59.541074 4678 handler.go:325] Added event &{/system.slice/docker-68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2.scope 2017-01-25 05:12:59.541061542 -0500 EST containerDeletion {}} I0125 05:12:59.684635 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:12:59.704751 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-2-deploy", UID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11103", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Created' Created container with docker id 3d4f90b50c8d; Security:[seccomp=unconfined] I0125 05:12:59.705484 4678 audit.go:125] 2017-01-25T05:12:59.705439977-05:00 AUDIT: id="8d0cafdf-b1d6-4bdd-adfc-76ce447b948a" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:59.708548 4678 audit.go:45] 2017-01-25T05:12:59.708533617-05:00 AUDIT: id="8d0cafdf-b1d6-4bdd-adfc-76ce447b948a" response="201" I0125 05:12:59.708603 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.448588ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:59.748823 4678 docker_manager.go:1577] Container "68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc" exited after 233.048034ms I0125 05:12:59.877655 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28.scope" I0125 05:12:59.878267 4678 server.go:664] 
Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-2-deploy", UID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11103", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Started' Started container with docker id 3d4f90b50c8d I0125 05:12:59.878872 4678 audit.go:125] 2017-01-25T05:12:59.878829355-05:00 AUDIT: id="66311772-7e73-4ede-a354-896b7df1788e" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:12:59.879026 4678 generic.go:145] GenericPLEG: b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/57a48f33a7e98a6f9717acc61afef50cf3addd12de4cc12b2736292213e6906d: exited -> non-existent I0125 05:12:59.879043 4678 generic.go:145] GenericPLEG: b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094/c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b: unknown -> non-existent I0125 05:12:59.879053 4678 generic.go:145] GenericPLEG: b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094/b1f9b804ccbc1803ef7a7e7a4f2eea640ca39ebc11e7426e2ba0dfac10e59d2a: exited -> non-existent I0125 05:12:59.879066 4678 generic.go:145] GenericPLEG: b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48: running -> exited I0125 05:12:59.879079 4678 generic.go:145] GenericPLEG: b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2: running -> exited I0125 05:12:59.879098 4678 generic.go:145] GenericPLEG: d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28: non-existent -> running I0125 05:12:59.879114 4678 generic.go:145] GenericPLEG: d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d: non-existent -> running I0125 05:12:59.881768 4678 audit.go:45] 2017-01-25T05:12:59.881750172-05:00 AUDIT: id="66311772-7e73-4ede-a354-896b7df1788e" response="201" I0125 05:12:59.881826 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.278821ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] E0125 05:12:59.894821 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment": symlink /var/log/containers/postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_deployment-3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28.log: no such file or directory I0125 05:12:59.898840 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42e9beb00 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/containers/postgresql-slave/089e472a Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~empty-dir/postgresql-data Destination:/var/lib/pgsql/data Driver: Mode:Z RW:true Propagation:rprivate} {Name: 
Source:/mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate}] Config:0xc42f1985a0 NetworkSettings:0xc42acfda00} I0125 05:12:59.907720 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42b28b340 Mounts:[] Config:0xc42ffd0900 NetworkSettings:0xc4335ead00} I0125 05:12:59.929599 4678 generic.go:342] PLEG: Write status for postgresql-slave-1-qt1rc/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-slave-1-qt1rc", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc428e728c0), (*container.ContainerStatus)(0xc42b84cfc0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:12:59.929773 4678 kubelet_pods.go:1029] Generating status for "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:59.930148 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48"} I0125 05:12:59.930191 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2"} I0125 05:12:59.931089 4678 audit.go:125] 2017-01-25T05:12:59.931050438-05:00 AUDIT: id="c1a15c41-a7d4-411b-b91a-cab6ccaa31d7" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:59.932526 4678 audit.go:45] 2017-01-25T05:12:59.932511556-05:00 AUDIT: id="c1a15c41-a7d4-411b-b91a-cab6ccaa31d7" response="200" I0125 05:12:59.932648 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (2.119479ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:59.933642 4678 audit.go:125] 2017-01-25T05:12:59.933609772-05:00 AUDIT: id="8ebd3f4f-cc70-4e75-a002-31751d69c1fd" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/status" I0125 05:12:59.935909 4678 audit.go:45] 2017-01-25T05:12:59.935895075-05:00 AUDIT: id="8ebd3f4f-cc70-4e75-a002-31751d69c1fd" response="200" I0125 05:12:59.935985 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc/status: (2.609306ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:12:59.937377 4678 replication_controller.go:378] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- 
Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11093 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc424399428 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11113 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc438c96408 Labels:map[deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:59.937470 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:12:31.005970671 -0500 EST, labels map[name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave]. 
I0125 05:12:59.937606 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc in state Running, deletion time 2017-01-25 05:12:31.005970671 -0500 EST I0125 05:12:59.937629 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (52.094µs) I0125 05:12:59.937658 4678 replica_set.go:320] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11093 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc424399428 Labels:map[deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11113 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc438c96408 Labels:map[deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1] Annotations:map[openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1] OwnerReferences:[] Finalizers:[] ClusterName:}. 
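The controller_utils.go entry above ("Ignoring inactive pod … in state Running, deletion time …") reflects how the replication manager counts replicas: a pod that is still Running but already has a deletion timestamp no longer counts as active, which is what lets a replacement be scheduled while the old pod drains. A minimal sketch of that filter, using a hypothetical trimmed-down pod struct rather than the real API types:

```go
package main

import (
	"fmt"
	"time"
)

// pod is a hypothetical, trimmed-down stand-in for the API object.
type pod struct {
	Name              string
	Phase             string     // "Pending", "Running", "Succeeded", "Failed"
	DeletionTimestamp *time.Time // non-nil once deletion has been requested
}

// isActive mirrors the rule suggested by the log: terminal pods and pods that
// are already being deleted do not count toward the controller's replicas.
func isActive(p pod) bool {
	if p.Phase == "Succeeded" || p.Phase == "Failed" {
		return false
	}
	return p.DeletionTimestamp == nil
}

func main() {
	deleted := time.Date(2017, 1, 25, 5, 12, 31, 0, time.FixedZone("EST", -5*3600))
	pods := []pod{
		{Name: "postgresql-slave-1-qt1rc", Phase: "Running", DeletionTimestamp: &deleted},
		{Name: "postgresql-master-1-6jfgj", Phase: "Running"},
	}
	for _, p := range pods {
		fmt.Printf("%s active=%v\n", p.Name, isActive(p))
	}
}
```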
I0125 05:12:59.937724 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:12:31.005970671 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-slave-1-qt1rc", GenerateName:"postgresql-slave-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11113", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935920, nsec:921802332, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc432acb5a0), DeletionGracePeriodSeconds:(*int64)(0xc438c96408), Labels:map[string]string{"app":"pg-replica-example", "deployment":"postgresql-slave-1", "deploymentconfig":"postgresql-slave", "name":"postgresql-slave"}, Annotations:map[string]string{"openshift.io/deployment-config.name":"postgresql-slave", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-slave-1\",\"uid\":\"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"10998\"}}\n", "openshift.io/deployment.name":"postgresql-slave-1", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc438c964a0), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42f968fc0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), 
RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-slave", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-slave"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_SERVICE_NAME", Value:"postgresql-master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc42f969050), ReadinessProbe:(*api.Probe)(0xc42f969080), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42f9690b0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc438c965a0), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc433952480), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935979, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql-slave]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, 
LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc432acba20), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-slave", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc4383cde30)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48"}}}}. I0125 05:12:59.938027 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-qt1rc, ReplicaSet controller will avoid syncing I0125 05:12:59.938059 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-qt1rc, job controller will avoid syncing I0125 05:12:59.938076 4678 daemoncontroller.go:332] Pod postgresql-slave-1-qt1rc updated. I0125 05:12:59.938110 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-qt1rc, daemon set controller will avoid syncing I0125 05:12:59.938131 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-qt1rc" I0125 05:12:59.938148 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-qt1rc, PodDisruptionBudget controller will avoid syncing. 
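The string of "No ReplicaSets/jobs/daemon sets/PodDisruptionBudgets found for pod ..., will avoid syncing" entries is each workload controller receiving the same pod update and checking whether the pod's labels satisfy any of its objects' selectors. A small sketch of the equality-based selector match those lookups reduce to (the DaemonSet selector below is purely hypothetical, for contrast):

package main

import "fmt"

// matches reports whether every key/value pair required by an equality-based
// selector is present in the pod's labels (the selector is a subset of the labels).
func matches(selector, podLabels map[string]string) bool {
	for k, v := range selector {
		if podLabels[k] != v {
			return false
		}
	}
	return true
}

func main() {
	podLabels := map[string]string{
		"app":              "pg-replica-example",
		"deployment":       "postgresql-slave-1",
		"deploymentconfig": "postgresql-slave",
		"name":             "postgresql-slave",
	}
	// The owning ReplicationController selects on the deployment labels ...
	rcSelector := map[string]string{
		"deployment":       "postgresql-slave-1",
		"deploymentconfig": "postgresql-slave",
		"name":             "postgresql-slave",
	}
	// ... while a hypothetical DaemonSet in the namespace selects something else.
	dsSelector := map[string]string{"name": "fluentd"}

	fmt.Println(matches(rcSelector, podLabels)) // true: that controller would sync
	fmt.Println(matches(dsSelector, podLabels)) // false: "will avoid syncing"
}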
I0125 05:12:59.938154 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-qt1rc" I0125 05:12:59.938497 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-qt1rc, StatefulSet controller will avoid syncing I0125 05:12:59.938969 4678 status_manager.go:425] Status for pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935920 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935979 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql-slave]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935920 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc425ae2000 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-slave State:{Waiting: Running: Terminated:0xc437ee4cb0} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48}]} version:4 podName:postgresql-slave-1-qt1rc podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:12:59.939025 4678 status_manager.go:441] Removing Pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" from etcd I0125 05:12:59.939336 4678 config.go:281] Setting pods for source api I0125 05:12:59.940797 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:12:59.943357 4678 audit.go:125] 2017-01-25T05:12:59.943314516-05:00 AUDIT: id="6af57560-f87a-4d0e-8efd-42ee0b2d546c" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:59.944322 4678 audit.go:45] 2017-01-25T05:12:59.944308899-05:00 AUDIT: id="6af57560-f87a-4d0e-8efd-42ee0b2d546c" response="200" I0125 05:12:59.944391 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (3.060572ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:59.944640 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:12:59.946606 4678 audit.go:125] 2017-01-25T05:12:59.946575257-05:00 AUDIT: id="12f846ad-82bb-4e3b-a9da-e0c799fa1681" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:12:59.948382 4678 audit.go:45] 2017-01-25T05:12:59.948369263-05:00 AUDIT: id="12f846ad-82bb-4e3b-a9da-e0c799fa1681" response="200" I0125 05:12:59.948431 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (3.433756ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:12:59.948861 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:12:59.949028 4678 proxier.go:804] Syncing iptables rules I0125 05:12:59.949038 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:12:59.978462 4678 audit.go:125] 2017-01-25T05:12:59.978358153-05:00 AUDIT: id="b9f671fc-7d6a-4f35-82a9-e26c77f93f32" ip="172.18.7.222" method="DELETE" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc" I0125 05:12:59.979374 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:59.979585 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:59.979786 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:59.979809 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:12:59.979822 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master] I0125 05:12:59.979906 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:59.979932 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:12:59.979952 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:12:59.979971 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:12:59.980184 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(41.947346ms) I0125 05:12:59.983723 4678 helpers.go:101] Unable to get network stats from pid 10368: couldn't read network stats: failure opening /proc/10368/net/dev: open /proc/10368/net/dev: no such file or directory I0125 05:12:59.991590 4678 replication_controller.go:378] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11113 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc438c96408 Labels:map[app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11115 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:01.005970671 -0500 EST DeletionGracePeriodSeconds:0xc4371f9570 Labels:map[deploymentconfig:postgresql-slave name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:12:59.991755 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:12:01.005970671 -0500 EST, labels map[app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave]. 
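In the replication_controller entry just above, the pod's DeletionTimestamp jumps back from 05:12:31 to 05:12:01 while DeletionGracePeriodSeconds changes: the node's DELETE (the audit entry at 05:12:59.978) shortens the grace period to 0, and the API server appears to rewind the deletion deadline by the amount the grace period shrank. A worked sketch of that arithmetic, on the assumption that the deadline is kept at "time the delete was first requested plus the current grace period":

package main

import (
	"fmt"
	"time"
)

func main() {
	est := time.FixedZone("EST", -5*3600)

	// First graceful delete at ~05:12:01 with the pod's default 30s grace period:
	requested := time.Date(2017, 1, 25, 5, 12, 1, 5970671, est)
	oldGrace := 30 * time.Second
	oldDeadline := requested.Add(oldGrace)
	fmt.Println("initial DeletionTimestamp:", oldDeadline) // 2017-01-25 05:12:31.005970671 -0500 EST

	// The node's DELETE at 05:12:59 reduces the grace period to 0; the deadline
	// is rewound by (oldGrace - newGrace), back to the original request time.
	newGrace := 0 * time.Second
	newDeadline := oldDeadline.Add(-(oldGrace - newGrace))
	fmt.Println("shortened DeletionTimestamp:", newDeadline) // 2017-01-25 05:12:01.005970671 -0500 EST
}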
I0125 05:12:59.992006 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc in state Running, deletion time 2017-01-25 05:12:01.005970671 -0500 EST I0125 05:12:59.992044 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (92.231µs) I0125 05:12:59.992112 4678 replica_set.go:320] Pod postgresql-slave-1-qt1rc updated, objectMeta {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11113 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:31.005970671 -0500 EST DeletionGracePeriodSeconds:0xc438c96408 Labels:map[app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave] Annotations:map[openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-qt1rc GenerateName:postgresql-slave-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc UID:b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11115 Generation:0 CreationTimestamp:2017-01-25 05:12:00.921802332 -0500 EST DeletionTimestamp:2017-01-25 05:12:01.005970671 -0500 EST DeletionGracePeriodSeconds:0xc4371f9570 Labels:map[app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave name:postgresql-slave] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-slave-1","uid":"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"10998"}} openshift.io/deployment.name:postgresql-slave-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-slave] OwnerReferences:[] Finalizers:[] ClusterName:}. 
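Interleaved with these controller entries, the endpoints_controller repeatedly resyncs the postgresql-slave service and reports "ready: 0 not ready: 0": the only backing pod is terminating, not Ready, and has no PodIP, so neither address list gets an entry and an effectively empty Endpoints object is PUT back. A rough sketch of that partitioning with simplified types (the real controller also handles named ports and service selectors):

package main

import "fmt"

type endpointPod struct {
	Name        string
	IP          string
	Ready       bool // the pod's Ready condition
	Terminating bool // DeletionTimestamp set
}

// partition splits candidate backend pods into ready and not-ready addresses,
// skipping pods that have no IP yet or are being deleted.
func partition(pods []endpointPod) (ready, notReady []string) {
	for _, p := range pods {
		if p.IP == "" || p.Terminating {
			continue
		}
		if p.Ready {
			ready = append(ready, p.IP)
		} else {
			notReady = append(notReady, p.IP)
		}
	}
	return ready, notReady
}

func main() {
	pods := []endpointPod{
		// postgresql-slave-1-qt1rc at this point: Ready=False, no PodIP, terminating.
		{Name: "postgresql-slave-1-qt1rc", IP: "", Ready: false, Terminating: true},
	}
	r, nr := partition(pods)
	fmt.Printf("ready: %d not ready: %d\n", len(r), len(nr)) // ready: 0 not ready: 0
}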
I0125 05:12:59.992224 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:12:01.005970671 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-slave-1-qt1rc", GenerateName:"postgresql-slave-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11115", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935920, nsec:921802332, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc43480d160), DeletionGracePeriodSeconds:(*int64)(0xc4371f9570), Labels:map[string]string{"app":"pg-replica-example", "deployment":"postgresql-slave-1", "deploymentconfig":"postgresql-slave", "name":"postgresql-slave"}, Annotations:map[string]string{"openshift.io/deployment-config.name":"postgresql-slave", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-slave-1\",\"uid\":\"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"10998\"}}\n", "openshift.io/deployment.name":"postgresql-slave-1", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc4371f9610), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc432653290), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), 
RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-slave", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-slave"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_SERVICE_NAME", Value:"postgresql-master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc432653500), ReadinessProbe:(*api.Probe)(0xc432653530), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc432653560), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc4371f9710), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc431e76640), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935979, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql-slave]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, 
LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc43480d4e0), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-slave", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc433b3e1c0)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48"}}}}. I0125 05:12:59.992693 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-qt1rc, ReplicaSet controller will avoid syncing I0125 05:12:59.992764 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-qt1rc, job controller will avoid syncing I0125 05:12:59.992810 4678 daemoncontroller.go:332] Pod postgresql-slave-1-qt1rc updated. I0125 05:12:59.992878 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-qt1rc, daemon set controller will avoid syncing I0125 05:12:59.992926 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-qt1rc" I0125 05:12:59.992957 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-qt1rc, PodDisruptionBudget controller will avoid syncing. I0125 05:12:59.992963 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-qt1rc" I0125 05:12:59.993402 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-qt1rc, StatefulSet controller will avoid syncing I0125 05:12:59.999896 4678 audit.go:45] 2017-01-25T05:12:59.999877363-05:00 AUDIT: id="b9f671fc-7d6a-4f35-82a9-e26c77f93f32" response="200" I0125 05:13:00.000057 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc: (58.906594ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:00.001844 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.deletePod)-fm, timestamp 2017-01-25 05:12:01.005970671 -0500 EST, labels map[name:postgresql-slave app:pg-replica-example deployment:postgresql-slave-1 deploymentconfig:postgresql-slave]. 
I0125 05:13:00.001999 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (43.764µs) I0125 05:13:00.002031 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.deletePod)-fm, timestamp 2017-01-25 05:12:01.005970671 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-slave-1-qt1rc", GenerateName:"postgresql-slave-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-qt1rc", UID:"b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11116", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935920, nsec:921802332, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc438e603a0), DeletionGracePeriodSeconds:(*int64)(0xc4380586d8), Labels:map[string]string{"app":"pg-replica-example", "deployment":"postgresql-slave-1", "deploymentconfig":"postgresql-slave", "name":"postgresql-slave"}, Annotations:map[string]string{"kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-slave-1\",\"uid\":\"b386ec05-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"10998\"}}\n", "openshift.io/deployment.name":"postgresql-slave-1", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1", "openshift.io/deployment-config.name":"postgresql-slave"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc438058770), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), 
Secret:(*api.SecretVolumeSource)(0xc433cf4e10), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-slave", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-slave"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_SERVICE_NAME", Value:"postgresql-master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc433cf4ea0), ReadinessProbe:(*api.Probe)(0xc433cf4ed0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc433cf4f00), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc438058880), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc42ffa1f00), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935979, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready 
status: [postgresql-slave]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935920, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc438e606c0), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-slave", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc43365d340)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48"}}}}. I0125 05:13:00.002423 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-qt1rc, ReplicaSet controller will avoid syncing I0125 05:13:00.002457 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-qt1rc, job controller will avoid syncing I0125 05:13:00.002472 4678 daemoncontroller.go:367] Pod postgresql-slave-1-qt1rc deleted. I0125 05:13:00.002501 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-qt1rc, daemon set controller will avoid syncing I0125 05:13:00.002514 4678 disruption.go:355] deletePod called on pod "postgresql-slave-1-qt1rc" I0125 05:13:00.002528 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-qt1rc, PodDisruptionBudget controller will avoid syncing. I0125 05:13:00.002532 4678 disruption.go:358] No matching pdb for pod "postgresql-slave-1-qt1rc" I0125 05:13:00.002588 4678 pet_set.go:210] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-qt1rc deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.(*StatefulSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.deletePod)-fm. 
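Around this point the kubelet's status manager has already reported "Removing Pod ... from etcd" and, just below, reports the pod "fully terminated and removed from etcd": once the pod carries a deletion timestamp and none of its containers can still be running, the kubelet issues the final zero-grace DELETE that removes the API object (the DELETE from system:node:172.18.7.222 above). A loose sketch of that gate, with simplified types and the exact condition treated as an assumption:

package main

import "fmt"

type containerStatus struct {
	Name       string
	Terminated bool // container has reached a Terminated state (or never started)
}

type podStatus struct {
	HasDeletionTimestamp bool
	Containers           []containerStatus
}

// canBeRemoved guesses at the status manager's gate: the pod was asked to go
// away and every container has stopped, so the API object can be force-deleted.
func canBeRemoved(p podStatus) bool {
	if !p.HasDeletionTimestamp {
		return false
	}
	for _, c := range p.Containers {
		if !c.Terminated {
			return false
		}
	}
	return true
}

func main() {
	p := podStatus{
		HasDeletionTimestamp: true,
		Containers:           []containerStatus{{Name: "postgresql-slave", Terminated: true}},
	}
	fmt.Println(canBeRemoved(p)) // true: DELETE .../pods/postgresql-slave-1-qt1rc with grace 0
}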
I0125 05:13:00.002619 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-qt1rc, StatefulSet controller will avoid syncing I0125 05:13:00.003317 4678 status_manager.go:443] Pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" fully terminated and removed from etcd I0125 05:13:00.004182 4678 audit.go:125] 2017-01-25T05:13:00.004141601-05:00 AUDIT: id="d6968093-8ee6-473c-8711-c13c60d5bb3d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:13:00.005419 4678 audit.go:45] 2017-01-25T05:13:00.005399104-05:00 AUDIT: id="d6968093-8ee6-473c-8711-c13c60d5bb3d" response="200" I0125 05:13:00.005508 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (9.572052ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:00.005825 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:13:00.008264 4678 audit.go:125] 2017-01-25T05:13:00.008224775-05:00 AUDIT: id="41558b22-2011-4ab3-a005-aa4daea4b2e5" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:13:00.008955 4678 audit.go:45] 2017-01-25T05:13:00.008942479-05:00 AUDIT: id="41558b22-2011-4ab3-a005-aa4daea4b2e5" response="200" I0125 05:13:00.009022 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (2.767647ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:00.009296 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(16.248771ms) I0125 05:13:00.013860 4678 audit.go:125] 2017-01-25T05:13:00.013815178-05:00 AUDIT: id="07ed6ec5-468b-477b-8f98-7d5f016d5ec1" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:13:00.015948 4678 audit.go:45] 2017-01-25T05:13:00.015931683-05:00 AUDIT: id="07ed6ec5-468b-477b-8f98-7d5f016d5ec1" response="200" I0125 05:13:00.016034 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (6.390408ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:00.016936 4678 audit.go:125] 2017-01-25T05:13:00.016896634-05:00 AUDIT: id="cb13813b-8175-465c-a16e-697143d7229e" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:13:00.017461 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:13:00.017787 4678 config.go:281] Setting pods for source api I0125 05:13:00.018260 4678 audit.go:45] 2017-01-25T05:13:00.018244671-05:00 AUDIT: id="cb13813b-8175-465c-a16e-697143d7229e" response="200" I0125 05:13:00.018585 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (6.059142ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:00.019643 4678 config.go:281] Setting pods for source api I0125 05:13:00.021046 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:00.021111 4678 kubelet.go:1791] SyncLoop (REMOVE, "api"): "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:00.021437 4678 audit.go:125] 2017-01-25T05:13:00.021395647-05:00 AUDIT: id="f1c7aea3-df12-4518-a4e5-8d7943bf3bb9" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:13:00.022071 4678 audit.go:45] 2017-01-25T05:13:00.022052226-05:00 AUDIT: id="f1c7aea3-df12-4518-a4e5-8d7943bf3bb9" response="200" I0125 05:13:00.022122 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (3.090142ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:00.031053 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(21.718995ms) I0125 05:13:00.031234 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:00.047331 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42c463ce0 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/08cf920c Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42e54eea0 NetworkSettings:0xc42dc7b000} I0125 05:13:00.061444 4678 kubelet.go:1976] Failed to delete pod "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)", err: pod not found I0125 05:13:00.068822 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:00.087099 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42f5b22c0 Mounts:[] Config:0xc42e54f200 NetworkSettings:0xc42dc7b400} I0125 05:13:00.105449 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:00.123979 4678 generic.go:342] PLEG: Write status for postgresql-master-2-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-2-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.4", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc436f03420), (*container.ContainerStatus)(0xc436f037a0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:13:00.124096 4678 generic.go:333] PLEG: Delete status for pod "b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:13:00.124117 4678 generic.go:333] PLEG: Delete status for pod "b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:13:00.124214 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28"} I0125 05:13:00.124328 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d"} I0125 05:13:00.124368 4678 kubelet.go:1820] SyncLoop (PLEG): ignore irrelevant event: &pleg.PodLifecycleEvent{ID:"b3e8deb7-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b"} W0125 05:13:00.124395 4678 pod_container_deletor.go:77] Container "c3d3fd396d9e194139fbd6e49092f77270040582f737e83c81b6044beb0a652b" not found in pod's containers I0125 05:13:00.124435 4678 kubelet_pods.go:1029] Generating status for 
"postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:00.124729 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:00.125404 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1%2Cdeploymentconfig%3Dpostgresql-slave%2Cname%3Dpostgresql-slave&resourceVersion=11005&timeoutSeconds=537: (59.096002844s) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.2:37586] I0125 05:13:00.126350 4678 audit.go:125] 2017-01-25T05:13:00.126292879-05:00 AUDIT: id="87b5032f-b78a-4725-9422-8d74c454aa36" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy" I0125 05:13:00.128862 4678 audit.go:45] 2017-01-25T05:13:00.128846334-05:00 AUDIT: id="87b5032f-b78a-4725-9422-8d74c454aa36" response="200" I0125 05:13:00.128998 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy: (3.124538ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:00.131168 4678 audit.go:125] 2017-01-25T05:13:00.131093734-05:00 AUDIT: id="5b6ff730-1b3d-4066-89e2-e3a3887224ee" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy/status" I0125 05:13:00.136178 4678 audit.go:125] 2017-01-25T05:13:00.136075589-05:00 AUDIT: id="a4b253a0-cdc2-46fd-b7f4-3e109af6fa39" ip="172.17.0.2" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events?fieldSelector=involvedObject.kind%3DReplicationController%2CinvolvedObject.name%3Dpostgresql-slave-1%2CinvolvedObject.namespace%3Dextended-test-postgresql-replication-1-34bbd-xd4g8%2CinvolvedObject.uid%3Db386ec05-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:13:00.137062 4678 audit.go:45] 2017-01-25T05:13:00.13702418-05:00 AUDIT: id="5b6ff730-1b3d-4066-89e2-e3a3887224ee" response="200" I0125 05:13:00.137264 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy/status: (6.563813ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:00.138872 4678 status_manager.go:425] Status for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935976 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935980 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} 
LastTransitionTime:{Time:{sec:63620935976 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.4 StartTime:0xc433575160 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running:0xc433c14e60 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28}]} version:2 podName:postgresql-master-2-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:13:00.139376 4678 config.go:281] Setting pods for source api I0125 05:13:00.141642 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:00.143317 4678 replication_controller.go:378] Pod postgresql-master-2-deploy updated, objectMeta {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11106 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11117 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:13:00.143547 4678 replication_controller.go:255] No controllers found for pod postgresql-master-2-deploy, replication manager will avoid syncing I0125 05:13:00.143626 4678 replica_set.go:320] Pod postgresql-master-2-deploy updated, objectMeta {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11106 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11117 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:13:00.143823 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-deploy, ReplicaSet controller will avoid syncing I0125 05:13:00.143944 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-deploy, job controller will avoid syncing I0125 05:13:00.144015 4678 daemoncontroller.go:332] Pod postgresql-master-2-deploy updated. I0125 05:13:00.144061 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-deploy, daemon set controller will avoid syncing I0125 05:13:00.144117 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-deploy" I0125 05:13:00.144149 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-deploy, PodDisruptionBudget controller will avoid syncing. 
I0125 05:13:00.144156 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-deploy" I0125 05:13:00.144352 4678 audit.go:45] 2017-01-25T05:13:00.144306507-05:00 AUDIT: id="a4b253a0-cdc2-46fd-b7f4-3e109af6fa39" response="200" I0125 05:13:00.144366 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-deploy, StatefulSet controller will avoid syncing I0125 05:13:00.144670 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events?fieldSelector=involvedObject.kind%3DReplicationController%2CinvolvedObject.name%3Dpostgresql-slave-1%2CinvolvedObject.namespace%3Dextended-test-postgresql-replication-1-34bbd-xd4g8%2CinvolvedObject.uid%3Db386ec05-e2e6-11e6-a4b0-0e6a5cbf0094: (14.562298ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.2:37586] I0125 05:13:00.159398 4678 audit.go:125] 2017-01-25T05:13:00.159343976-05:00 AUDIT: id="91deb423-fb3c-40b8-adbe-5394e999b216" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:13:00.163140 4678 audit.go:45] 2017-01-25T05:13:00.163112847-05:00 AUDIT: id="91deb423-fb3c-40b8-adbe-5394e999b216" response="200" I0125 05:13:00.163454 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (17.742284ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:13:00.163917 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 status from Pending to Running (scale: 0) I0125 05:13:00.165314 4678 factory.go:154] Replication controller "postgresql-master-2" updated. I0125 05:13:00.165434 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:13:00.165422089 -0500 EST. I0125 05:13:00.165954 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 0->0 I0125 05:13:00.166004 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 either never recorded expectations, or the ttl expired. I0125 05:13:00.166034 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (41.7µs) I0125 05:13:00.174587 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:00.201650 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/secret/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") from pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:00.201850 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/empty-dir/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094-postgresql-data" (spec.Name: "postgresql-data") from pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094"). 
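
The deployment_util.go:784 entry above spells out the availability rule: a pod counts as available once its Ready condition has held for at least minReadySeconds, and with minReadySeconds 0 it is available immediately. A standalone sketch of that comparison, with the pod fields reduced to bare parameters:

package main

import (
	"fmt"
	"time"
)

// isAvailable mirrors the check logged by deployment_util: a ready pod counts
// as available once it has been ready for at least minReadySeconds.
func isAvailable(ready bool, lastTransition time.Time, minReadySeconds int32, now time.Time) bool {
	if !ready {
		return false
	}
	if minReadySeconds == 0 {
		return true
	}
	return !lastTransition.Add(time.Duration(minReadySeconds) * time.Second).After(now)
}

func main() {
	est := time.FixedZone("EST", -5*3600)
	readySince := time.Date(2017, 1, 25, 5, 12, 42, 0, est)
	now := time.Date(2017, 1, 25, 5, 13, 0, 0, est)
	fmt.Println(isAvailable(true, readySince, 0, now))  // true: minReadySeconds is 0, as in the log
	fmt.Println(isAvailable(true, readySince, 60, now)) // false: ready for only ~18s
}
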
I0125 05:13:00.202144 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:00.202185 4678 util.go:340] Tearing down volume default-token-0g2nw for pod b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:00.202345 4678 mount_linux.go:147] Unmounting /mnt/openshift-xfs-vol-dir/pods/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:00.242335 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:00.272516 4678 audit.go:125] 2017-01-25T05:13:00.272459086-05:00 AUDIT: id="b6b9679c-83f5-48ae-a011-7e24f8b6bfb7" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:13:00.275310 4678 audit.go:45] 2017-01-25T05:13:00.275290074-05:00 AUDIT: id="b6b9679c-83f5-48ae-a011-7e24f8b6bfb7" response="200" I0125 05:13:00.275530 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (3.417458ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:00.275780 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:13:00.276001 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:00.276174 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:00.276905 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (OuterVolumeSpecName: "default-token-0g2nw") pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "default-token-0g2nw". PluginName "kubernetes.io/secret", VolumeGidValue "" I0125 05:13:00.277030 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:00.309347 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:00.338279 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:00.358976 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094-postgresql-data" (OuterVolumeSpecName: "postgresql-data") pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "postgresql-data". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" I0125 05:13:00.381356 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A 
KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:00.381391 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:00.409943 4678 proxier.go:797] syncProxyRules took 460.909674ms I0125 05:13:00.409974 4678 proxier.go:566] OnEndpointsUpdate took 461.035318ms for 6 endpoints I0125 05:13:00.410020 4678 proxier.go:381] Received update notice: [] I0125 05:13:00.410055 4678 proxier.go:804] Syncing iptables rules I0125 05:13:00.410065 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:00.425488 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:00.427824 4678 audit.go:125] 2017-01-25T05:13:00.427770855-05:00 AUDIT: 
id="6700c336-25e1-4681-a118-a97eb3c6de29" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:13:00.429722 4678 audit.go:45] 2017-01-25T05:13:00.4296877-05:00 AUDIT: id="6700c336-25e1-4681-a118-a97eb3c6de29" response="200" I0125 05:13:00.429977 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (2.512993ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:00.430323 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:00.430396 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:00.430413 4678 docker_manager.go:1999] pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment" exists as 3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28 I0125 05:13:00.430504 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d:-1 3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28:0]} I0125 05:13:00.439304 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:00.471389 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:00.504000 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:00.537538 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:00.565310 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:00.594166 4678 manager.go:898] Added container: "/system.slice/docker-3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28.scope" (aliases: [k8s_deployment.7bb3d39b_postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094_08cf920c 3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28], namespace: "docker") I0125 05:13:00.594407 4678 handler.go:325] Added event &{/system.slice/docker-3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28.scope 2017-01-25 05:12:59.807978133 -0500 EST containerCreation {}} I0125 05:13:00.594463 4678 container.go:407] Start housekeeping for container "/system.slice/docker-3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28.scope" I0125 05:13:00.629338 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:00.662388 4678 iptables.go:298] running iptables-save [-t filter] I0125 
05:13:00.695755 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:00.730724 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m 
comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:00.730769 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:00.747752 4678 audit.go:125] 2017-01-25T05:13:00.747689805-05:00 AUDIT: id="c6356d8b-97c9-4dac-828d-f243dcf229f4" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:13:00.749310 4678 audit.go:45] 2017-01-25T05:13:00.749295074-05:00 AUDIT: id="c6356d8b-97c9-4dac-828d-f243dcf229f4" response="200" I0125 05:13:00.749839 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (5.537746ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:00.755656 4678 audit.go:125] 2017-01-25T05:13:00.755611101-05:00 AUDIT: id="4d1a496c-ee92-49af-a559-fcf2907f7993" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-master" I0125 05:13:00.757544 4678 audit.go:45] 2017-01-25T05:13:00.757528618-05:00 AUDIT: id="4d1a496c-ee92-49af-a559-fcf2907f7993" response="200" I0125 05:13:00.758149 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-master: (5.675899ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:00.762017 4678 proxier.go:797] syncProxyRules took 351.958716ms I0125 05:13:00.762038 4678 proxier.go:431] OnServiceUpdate took 352.004703ms for 4 services I0125 05:13:00.763936 4678 audit.go:125] 2017-01-25T05:13:00.763902979-05:00 AUDIT: id="ccbcac66-abe7-4a09-be9a-104e776679fa" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:13:00.764968 4678 audit.go:45] 2017-01-25T05:13:00.764957212-05:00 AUDIT: id="ccbcac66-abe7-4a09-be9a-104e776679fa" response="200" I0125 05:13:00.765339 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (3.084455ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:00.769423 4678 audit.go:125] 2017-01-25T05:13:00.769378575-05:00 AUDIT: id="b5809f90-5fda-466f-8111-a096b6040bc8" ip="172.17.0.4" method="PUT" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:13:00.792617 4678 audit.go:45] 2017-01-25T05:13:00.792592561-05:00 AUDIT: id="b5809f90-5fda-466f-8111-a096b6040bc8" response="200" I0125 05:13:00.794276 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (26.406924ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:00.794859 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. 
Desired pod count change: 1->0 I0125 05:13:00.794959 4678 replication_controller.go:585] Too many "extended-test-postgresql-replication-1-34bbd-xd4g8"/"postgresql-master-1" replicas, need 0, deleting 1 I0125 05:13:00.794980 4678 controller_utils.go:306] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 waiting on deletions for: [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj] I0125 05:13:00.795010 4678 controller_utils.go:175] Setting expectations &controller.ControlleeExpectations{add:0, del:1, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1", timestamp:time.Time{sec:63620935980, nsec:795006103, loc:(*time.Location)(0xa2479e0)}} I0125 05:13:00.795070 4678 controller_utils.go:523] Controller postgresql-master-1 deleting pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj I0125 05:13:00.795674 4678 factory.go:154] Replication controller "postgresql-master-1" updated. I0125 05:13:00.795787 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:13:00.795775735 -0500 EST. I0125 05:13:00.797362 4678 audit.go:125] 2017-01-25T05:13:00.797326782-05:00 AUDIT: id="2b661de6-a672-4e87-9360-d69bdfe313cc" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:13:00.803576 4678 audit.go:45] 2017-01-25T05:13:00.803554865-05:00 AUDIT: id="2b661de6-a672-4e87-9360-d69bdfe313cc" response="200" I0125 05:13:00.803712 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (6.654441ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:13:00.804136 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 4) I0125 05:13:00.804575 4678 audit.go:125] 2017-01-25T05:13:00.804529439-05:00 AUDIT: id="02ce827f-f9a3-405d-b975-676ebf1572dd" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:13:00.804883 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:13:00.804998 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj ready condition last transition time 2017-01-25 05:12:42 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:13:00.804985921 -0500 EST. 
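
"Too many ... replicas, need 0, deleting 1" followed by "Setting expectations &controller.ControlleeExpectations{add:0, del:1, ...}" shows the expectations bookkeeping: the controller records how many deletions it just issued so that later syncs wait until the informer cache has observed them, or until a TTL expires. A toy, single-goroutine version of that idea, much reduced from the real keyed and concurrent implementation:

package main

import (
	"fmt"
	"time"
)

// expectations tracks creations/deletions a controller has issued but not yet
// observed, plus when they were recorded (for TTL expiry).
type expectations struct {
	add, del  int
	timestamp time.Time
}

func (e *expectations) expectDeletions(n int) {
	e.del = n
	e.timestamp = time.Now()
}

func (e *expectations) deletionObserved() {
	e.del--
}

func (e *expectations) fulfilled(ttl time.Duration) bool {
	return (e.add <= 0 && e.del <= 0) || time.Since(e.timestamp) > ttl
}

func main() {
	var exp expectations
	exp.expectDeletions(1)                      // "Setting expectations ... del:1"
	fmt.Println(exp.fulfilled(5 * time.Minute)) // false: sync must wait
	exp.deletionObserved()                      // "Lowered expectations ... del:0"
	fmt.Println(exp.fulfilled(5 * time.Minute)) // true: safe to sync again
}
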
I0125 05:13:00.807425 4678 audit.go:125] 2017-01-25T05:13:00.807383958-05:00 AUDIT: id="adf48c57-04f3-44a0-a303-52d48ff4c1cf" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:13:00.807795 4678 audit.go:45] 2017-01-25T05:13:00.807780645-05:00 AUDIT: id="02ce827f-f9a3-405d-b975-676ebf1572dd" response="200" I0125 05:13:00.807887 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (9.076784ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:00.808424 4678 config.go:281] Setting pods for source api I0125 05:13:00.808945 4678 audit.go:45] 2017-01-25T05:13:00.808929997-05:00 AUDIT: id="adf48c57-04f3-44a0-a303-52d48ff4c1cf" response="200" I0125 05:13:00.809285 4678 replication_controller.go:378] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11080 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master] Annotations:map[openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11121 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc437b56ff8 Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:}. 
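
The DELETE issued by the replication-controller service account does not remove the pod outright: the follow-up update shows DeletionTimestamp (05:13:30, thirty seconds out) and DeletionGracePeriodSeconds being set, and the kubelet then terminates the containers. A hedged sketch of issuing such a graceful delete against the path from the log using plain net/http; the API server address is a placeholder, and a real client would send service-account credentials over TLS:

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Placeholder host: a real client would use the cluster CA and a bearer token.
	const apiServer = "http://127.0.0.1:8080"
	path := "/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj"

	// DeleteOptions with a grace period: the API server only stamps
	// deletionTimestamp/deletionGracePeriodSeconds; the kubelet performs the
	// actual termination, as the surrounding log shows.
	body := []byte(`{"kind":"DeleteOptions","apiVersion":"v1","gracePeriodSeconds":30}`)

	req, err := http.NewRequest(http.MethodDelete, apiServer+path, bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println("request failed:", err) // expected when run outside a cluster
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
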
I0125 05:13:00.809384 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:13:30.806265384 -0500 EST, labels map[deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example]. I0125 05:13:00.809447 4678 controller_utils.go:320] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 received delete for pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj I0125 05:13:00.809457 4678 controller_utils.go:192] Lowered expectations &controller.ControlleeExpectations{add:0, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1", timestamp:time.Time{sec:63620935980, nsec:795006103, loc:(*time.Location)(0xa2479e0)}} I0125 05:13:00.809493 4678 replica_set.go:320] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11080 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11121 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc437b56ff8 Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master] Annotations:map[openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:}. 
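
"Pod ... deleted through ... (*ReplicationManager).updatePod" means the controller noticed the deletion from an update event: the object gained a DeletionTimestamp, so the update is routed to delete handling and the expectation is lowered before the final watch delete arrives. A reduced sketch of that dispatch, with a hypothetical onDelete callback standing in for the real handler:

package main

import (
	"fmt"
	"time"
)

type pod struct {
	Name              string
	DeletionTimestamp *time.Time
}

// onUpdate is a cut-down version of the updatePod behavior implied by the log:
// an object that gained a DeletionTimestamp is routed to the delete handler.
func onUpdate(old, cur pod, onDelete func(pod)) {
	if old.DeletionTimestamp == nil && cur.DeletionTimestamp != nil {
		onDelete(cur) // graceful deletion observed via an update
		return
	}
	// otherwise: normal update handling (enqueue the owning controller, etc.)
}

func main() {
	ts := time.Date(2017, 1, 25, 5, 13, 30, 0, time.UTC)
	onUpdate(
		pod{Name: "postgresql-master-1-6jfgj"},
		pod{Name: "postgresql-master-1-6jfgj", DeletionTimestamp: &ts},
		func(p pod) { fmt.Println("treat update as delete for", p.Name) },
	)
}
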
I0125 05:13:00.809711 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:00.809774 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:00.809568 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:13:30.806265384 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-1-6jfgj", GenerateName:"postgresql-master-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11121", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:89557223, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc4340e9ba0), DeletionGracePeriodSeconds:(*int64)(0xc437b56ff8), Labels:map[string]string{"app":"pg-replica-example", "deployment":"postgresql-master-1", "deploymentconfig":"postgresql-master", "name":"postgresql-master"}, Annotations:map[string]string{"openshift.io/deployment-config.latest-version":"1", "openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-1", "openshift.io/generated-by":"OpenShiftNewApp", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-1\",\"uid\":\"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11016\"}}\n", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc4340e9ce0), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), 
GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc43298dd70), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"X5NgRSrwacHP", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc43298df80), ReadinessProbe:(*api.Probe)(0xc43298dfb0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc4319180f0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc437b57150), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc432149540), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, 
LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935962, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.7", StartTime:(*unversioned.Time)(0xc4340e9f40), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-master", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(0xc4340e9f80), Terminated:(*api.ContainerStateTerminated)(nil)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:true, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f"}}}}. I0125 05:13:00.809862 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-6jfgj, ReplicaSet controller will avoid syncing I0125 05:13:00.809896 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-6jfgj, job controller will avoid syncing I0125 05:13:00.809915 4678 daemoncontroller.go:332] Pod postgresql-master-1-6jfgj updated. I0125 05:13:00.809947 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-6jfgj, daemon set controller will avoid syncing I0125 05:13:00.809970 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-6jfgj" I0125 05:13:00.809985 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-6jfgj, PodDisruptionBudget controller will avoid syncing. 
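
In the api.Pod dump above, readiness is carried by Status.Conditions (Type "Ready", Status "True") rather than by a single field, and that slice is what readiness checks consult. A self-contained sketch with the condition struct trimmed to the two fields involved:

package main

import "fmt"

type podCondition struct {
	Type, Status string
}

// isPodReady scans the conditions slice for Type "Ready" with Status "True",
// the shape visible in the pod status dump above.
func isPodReady(conditions []podCondition) bool {
	for _, c := range conditions {
		if c.Type == "Ready" {
			return c.Status == "True"
		}
	}
	return false
}

func main() {
	conds := []podCondition{
		{Type: "Initialized", Status: "True"},
		{Type: "Ready", Status: "True"},
		{Type: "PodScheduled", Status: "True"},
	}
	fmt.Println(isPodReady(conds)) // true
}
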
I0125 05:13:00.809990 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-6jfgj" I0125 05:13:00.810091 4678 docker_manager.go:1536] Killing container "0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f postgresql-master extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj" with 30 second grace period I0125 05:13:00.810310 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-6jfgj, StatefulSet controller will avoid syncing I0125 05:13:00.810999 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (5.444792ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:00.811326 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 1->1 (need 0), fullyLabeledReplicas 1->1, readyReplicas 1->1, availableReplicas 1->1, sequence No: 2->3 I0125 05:13:00.811828 4678 event.go:217] Event(api.ObjectReference{Kind:"ReplicationController", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1", UID:"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11119", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: postgresql-master-1-6jfgj I0125 05:13:00.812247 4678 audit.go:125] 2017-01-25T05:13:00.812211595-05:00 AUDIT: id="859046a7-633c-4674-aa3d-1b4d3c13ffdc" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:13:00.813002 4678 audit.go:125] 2017-01-25T05:13:00.812968789-05:00 AUDIT: id="e8248465-a645-41ed-8474-b5a9ce2ed188" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:13:00.815571 4678 audit.go:125] 2017-01-25T05:13:00.815537801-05:00 AUDIT: id="76212c7c-f2cd-426f-be05-2b87f01abeb6" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:00.816078 4678 audit.go:45] 2017-01-25T05:13:00.816062511-05:00 AUDIT: id="e8248465-a645-41ed-8474-b5a9ce2ed188" response="200" I0125 05:13:00.817275 4678 audit.go:45] 2017-01-25T05:13:00.817256659-05:00 AUDIT: id="859046a7-633c-4674-aa3d-1b4d3c13ffdc" response="200" I0125 05:13:00.817366 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (5.410906ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:00.817499 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:13:00.820636 4678 audit.go:45] 2017-01-25T05:13:00.82062195-05:00 AUDIT: id="76212c7c-f2cd-426f-be05-2b87f01abeb6" response="200" I0125 05:13:00.820701 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (9.434105ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) 
kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:00.821479 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (3.459763ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:13:00.821702 4678 audit.go:125] 2017-01-25T05:13:00.821662694-05:00 AUDIT: id="94c2fa30-bd94-469f-bd40-bfee9d8756d9" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:13:00.821913 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:13:00.822619 4678 audit.go:125] 2017-01-25T05:13:00.822583027-05:00 AUDIT: id="80af02dd-b965-4920-9021-3f43150253a3" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/status" I0125 05:13:00.824092 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 4) I0125 05:13:00.826056 4678 audit.go:125] 2017-01-25T05:13:00.826016482-05:00 AUDIT: id="339f40cc-7614-4bd9-a506-8bec00692af1" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?fieldSelector=metadata.name%3Dpostgresql-master-1&resourceVersion=11119" I0125 05:13:00.826156 4678 audit.go:45] 2017-01-25T05:13:00.826142449-05:00 AUDIT: id="80af02dd-b965-4920-9021-3f43150253a3" response="200" I0125 05:13:00.826262 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/status: (3.931226ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:00.826488 4678 audit.go:45] 2017-01-25T05:13:00.826475487-05:00 AUDIT: id="339f40cc-7614-4bd9-a506-8bec00692af1" response="200" I0125 05:13:00.826687 4678 audit.go:125] 2017-01-25T05:13:00.826653034-05:00 AUDIT: id="a0e8a74d-2038-4f64-b1e6-820b86307140" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:13:00.827267 4678 audit.go:45] 2017-01-25T05:13:00.827254227-05:00 AUDIT: id="94c2fa30-bd94-469f-bd40-bfee9d8756d9" response="200" I0125 05:13:00.827951 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (15.204014ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:00.828399 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" 
(33.484542ms) I0125 05:13:00.828471 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj in state Running, deletion time 2017-01-25 05:13:30.806265384 -0500 EST I0125 05:13:00.828508 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 1->0 (need 0), fullyLabeledReplicas 1->0, readyReplicas 1->0, availableReplicas 1->0, sequence No: 2->3 I0125 05:13:00.828866 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. Desired pod count change: 0->0 I0125 05:13:00.828897 4678 factory.go:154] Replication controller "postgresql-master-1" updated. I0125 05:13:00.829087 4678 config.go:281] Setting pods for source api I0125 05:13:00.829491 4678 replication_controller.go:378] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11121 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc437b56ff8 Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11123 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc4355d1908 Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-1] Annotations:map[openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:}. 
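
"Ignoring inactive pod ... in state Running, deletion time ..." followed by "replicas 1->0" shows how the status counts are computed: pods with a deletion timestamp, or in a terminal phase, are filtered out even while their containers are still running. A trimmed-down sketch of that filter:

package main

import (
	"fmt"
	"time"
)

type podInfo struct {
	Name              string
	Phase             string
	DeletionTimestamp *time.Time
}

// countActive mirrors the filter behind the "Ignoring inactive pod" line:
// terminating or terminated pods do not count toward the RC's replica status.
func countActive(pods []podInfo) int {
	n := 0
	for _, p := range pods {
		if p.DeletionTimestamp != nil || p.Phase == "Succeeded" || p.Phase == "Failed" {
			continue
		}
		n++
	}
	return n
}

func main() {
	deleted := time.Date(2017, 1, 25, 5, 13, 30, 0, time.UTC)
	pods := []podInfo{{Name: "postgresql-master-1-6jfgj", Phase: "Running", DeletionTimestamp: &deleted}}
	fmt.Println(countActive(pods)) // 0: replicas reported as 1->0 in the log
}
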
I0125 05:13:00.829584 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:13:30.806265384 -0500 EST, labels map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master]. I0125 05:13:00.829658 4678 replica_set.go:320] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11121 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc437b56ff8 Labels:map[deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11123 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc4355d1908 Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:13:00.829725 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:13:30.806265384 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-1-6jfgj", GenerateName:"postgresql-master-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11123", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:89557223, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc43426b7c0), DeletionGracePeriodSeconds:(*int64)(0xc4355d1908), Labels:map[string]string{"deploymentconfig":"postgresql-master", "name":"postgresql-master", "app":"pg-replica-example", "deployment":"postgresql-master-1"}, Annotations:map[string]string{"kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-1\",\"uid\":\"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11016\"}}\n", "openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1", "openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-1", "openshift.io/generated-by":"OpenShiftNewApp"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc43426b980), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc429ade930), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), 
RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"X5NgRSrwacHP", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc429ade9c0), ReadinessProbe:(*api.Probe)(0xc429ade9f0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc429adea20), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc4355d1a70), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc43789b2c0), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935962, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, 
Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.7", StartTime:(*unversioned.Time)(0xc43148c0a0), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-master", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(0xc43148c0e0), Terminated:(*api.ContainerStateTerminated)(nil)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:true, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f"}}}}. I0125 05:13:00.830022 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-6jfgj, ReplicaSet controller will avoid syncing I0125 05:13:00.830050 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-6jfgj, job controller will avoid syncing I0125 05:13:00.830065 4678 daemoncontroller.go:332] Pod postgresql-master-1-6jfgj updated. I0125 05:13:00.830094 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-6jfgj, daemon set controller will avoid syncing I0125 05:13:00.830120 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-6jfgj" I0125 05:13:00.830137 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-6jfgj, PodDisruptionBudget controller will avoid syncing. I0125 05:13:00.830143 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-6jfgj" I0125 05:13:00.830233 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-6jfgj, StatefulSet controller will avoid syncing I0125 05:13:00.830708 4678 status_manager.go:425] Status for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935962 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.7 StartTime:0xc4283337a0 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running:0xc43170d3c0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f}]} version:4 podName:postgresql-master-1-6jfgj podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:13:00.830765 4678 status_manager.go:435] Pod 
"postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" is terminated, but some containers are still running I0125 05:13:00.831549 4678 audit.go:125] 2017-01-25T05:13:00.831514157-05:00 AUDIT: id="901a1fb1-ede0-4b00-99a3-f8a43d4b8adb" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:00.832881 4678 audit.go:45] 2017-01-25T05:13:00.832867173-05:00 AUDIT: id="a0e8a74d-2038-4f64-b1e6-820b86307140" response="201" I0125 05:13:00.832945 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (19.717501ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:00.834063 4678 audit.go:125] 2017-01-25T05:13:00.834029635-05:00 AUDIT: id="5edb8b01-9b11-4c5d-80a3-e7dff3de1026" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:13:00.834155 4678 audit.go:45] 2017-01-25T05:13:00.834142883-05:00 AUDIT: id="901a1fb1-ede0-4b00-99a3-f8a43d4b8adb" response="200" I0125 05:13:00.834254 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (10.33581ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:00.834581 4678 audit.go:45] 2017-01-25T05:13:00.834569494-05:00 AUDIT: id="5edb8b01-9b11-4c5d-80a3-e7dff3de1026" response="409" I0125 05:13:00.834592 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:13:00.834633 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (3.432471ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:00.834658 4678 roundrobin.go:275] LoadBalancerRR: Removing endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master: I0125 05:13:00.834739 4678 proxier.go:631] Removing endpoints for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master:" I0125 05:13:00.834773 4678 proxier.go:804] Syncing iptables rules I0125 05:13:00.834784 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:00.849103 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. 
(39.044899ms) I0125 05:13:00.855150 4678 audit.go:125] 2017-01-25T05:13:00.855097047-05:00 AUDIT: id="33f102c1-5278-4420-8c22-469c1521bc2b" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:13:00.855806 4678 audit.go:125] 2017-01-25T05:13:00.855766491-05:00 AUDIT: id="f60fc37e-94b5-4f3f-a98e-c2e47693d3ae" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:00.857644 4678 audit.go:45] 2017-01-25T05:13:00.857630207-05:00 AUDIT: id="33f102c1-5278-4420-8c22-469c1521bc2b" response="200" I0125 05:13:00.857896 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (7.650437ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:00.858110 4678 audit.go:45] 2017-01-25T05:13:00.858098846-05:00 AUDIT: id="f60fc37e-94b5-4f3f-a98e-c2e47693d3ae" response="200" I0125 05:13:00.858180 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (7.129302ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:00.858523 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:13:00.858919 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 1->0 (need 0), fullyLabeledReplicas 1->0, readyReplicas 1->0, availableReplicas 1->0, sequence No: 3->3 I0125 05:13:00.861279 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:00.861351 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 0 Endpoints [] I0125 05:13:00.861387 4678 healthcheck.go:89] Deleting endpoints map for service extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, all local endpoints gone I0125 05:13:00.861398 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:00.861413 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:00.861430 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:00.861440 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:13:00.861474 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:00.861486 4678 healthcheck.go:86] LB service health 
check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:00.861496 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:13:00.871652 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:00.889481 4678 audit.go:125] 2017-01-25T05:13:00.889406208-05:00 AUDIT: id="60004ec3-7d47-41dc-ae9a-a4246c8e326d" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:00.890596 4678 audit.go:45] 2017-01-25T05:13:00.890560449-05:00 AUDIT: id="60004ec3-7d47-41dc-ae9a-a4246c8e326d" response="200" I0125 05:13:00.890722 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (31.222142ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:00.890950 4678 audit.go:125] 2017-01-25T05:13:00.890895333-05:00 AUDIT: id="657d5761-aa6e-4cce-9754-1cdb2836f19d" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:13:00.895646 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (46.419277ms) I0125 05:13:00.898526 4678 audit.go:45] 2017-01-25T05:13:00.898504178-05:00 AUDIT: id="657d5761-aa6e-4cce-9754-1cdb2836f19d" response="200" I0125 05:13:00.900347 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (40.320971ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:00.900964 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (72.525652ms) I0125 05:13:00.901059 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj in state Running, deletion time 2017-01-25 05:13:30.806265384 -0500 EST I0125 05:13:00.901102 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 1->0 (need 0), fullyLabeledReplicas 1->0, readyReplicas 1->0, availableReplicas 1->0, sequence No: 3->3 I0125 05:13:00.901554 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. Desired pod count change: 0->0 I0125 05:13:00.901566 4678 replication_controller.go:338] Observed updated replica count for rc: postgresql-master-1, 1->0 I0125 05:13:00.902033 4678 factory.go:154] Replication controller "postgresql-master-1" updated. 
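The 409 on the replicationcontrollers/postgresql-master-1/status PUT above, followed immediately by a GET of the same controller and a second PUT that returns 200, is the standard optimistic-concurrency loop: a write carries the resourceVersion it was based on, the apiserver rejects stale writes with a conflict, and the caller refetches the latest version and retries. The sketch below is a self-contained toy model of that pattern; the store, object and updateStatus names are mine, not client-go or apiserver code.

package main

import (
	"errors"
	"fmt"
)

// Toy model of an apiserver object guarded by a resourceVersion.
type object struct {
	resourceVersion int
	replicas        int
}

type store struct{ current object }

var errConflict = errors.New("409: the object has been modified; please apply your changes to the latest version and try again")

func (s *store) get() object { return s.current }

// put succeeds only if the caller's copy is based on the latest resourceVersion.
func (s *store) put(o object) error {
	if o.resourceVersion != s.current.resourceVersion {
		return errConflict
	}
	o.resourceVersion++
	s.current = o
	return nil
}

// updateStatus follows the GET-modify-PUT loop the controller falls back to after a 409.
func updateStatus(s *store, stale object, replicas int) error {
	obj := stale
	for attempt := 0; attempt < 5; attempt++ {
		obj.replicas = replicas
		if err := s.put(obj); err == nil {
			return nil
		} else if !errors.Is(err, errConflict) {
			return err
		}
		fmt.Println("conflict, refetching latest version and retrying")
		obj = s.get()
	}
	return fmt.Errorf("giving up after repeated conflicts")
}

func main() {
	s := &store{current: object{resourceVersion: 11119, replicas: 1}}
	stale := s.get()
	// Someone else updates the object first, bumping its resourceVersion.
	fresh := s.get()
	_ = s.put(fresh)
	// Our update based on the stale copy hits a 409, refetches, and succeeds.
	if err := updateStatus(s, stale, 0); err != nil {
		fmt.Println("error:", err)
	}
	fmt.Printf("final: %+v\n", s.current)
}

In real controllers this loop is usually wrapped in a helper along the lines of client-go's retry.RetryOnConflict, which re-runs a mutate-and-update closure only while the returned error is a conflict.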
I0125 05:13:00.903591 4678 audit.go:125] 2017-01-25T05:13:00.903538817-05:00 AUDIT: id="90d8da0b-13a1-4d15-a319-acc7dbb2b3b3" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:13:00.905355 4678 audit.go:125] 2017-01-25T05:13:00.905296285-05:00 AUDIT: id="853b8a9e-f68f-47d8-bd46-fede3d2507ed" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:13:00.906075 4678 audit.go:45] 2017-01-25T05:13:00.906060974-05:00 AUDIT: id="853b8a9e-f68f-47d8-bd46-fede3d2507ed" response="409" I0125 05:13:00.906131 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (3.411852ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:00.908638 4678 audit.go:125] 2017-01-25T05:13:00.908603823-05:00 AUDIT: id="dfd8c409-e1b7-44c3-825a-aa7733dced4e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:13:00.909747 4678 audit.go:45] 2017-01-25T05:13:00.909733736-05:00 AUDIT: id="dfd8c409-e1b7-44c3-825a-aa7733dced4e" response="200" I0125 05:13:00.909947 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (3.25231ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:00.910240 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1, replicas 0->0 (need 0), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 3->3 I0125 05:13:00.912518 4678 audit.go:125] 2017-01-25T05:13:00.912481964-05:00 AUDIT: id="34cb9479-fbcf-463e-90c6-1486d7508580" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status" I0125 05:13:00.916513 4678 audit.go:45] 2017-01-25T05:13:00.916495928-05:00 AUDIT: id="34cb9479-fbcf-463e-90c6-1486d7508580" response="200" I0125 05:13:00.917717 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status: (7.055822ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:00.918229 4678 factory.go:154] Replication controller "postgresql-master-1" updated. 
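Each API request in this log shows up as a pair of audit records correlated by id: an audit.go:125 line with ip/method/user/namespace/uri, and an audit.go:45 line carrying the response code. When tracing a flow such as the 409/200 pair above, it helps to join the two. The snippet below is a small, hypothetical helper (the kv regexp and requests map are my own names) that pairs request and response lines in this exact format; the two sample lines are copied from this log, truncated to the AUDIT payload.

package main

import (
	"fmt"
	"regexp"
)

var lines = []string{
	`2017-01-25T05:13:00.905296285-05:00 AUDIT: id="853b8a9e-f68f-47d8-bd46-fede3d2507ed" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1/status"`,
	`2017-01-25T05:13:00.906060974-05:00 AUDIT: id="853b8a9e-f68f-47d8-bd46-fede3d2507ed" response="409"`,
}

// kv picks out every key="value" pair in an AUDIT record.
var kv = regexp.MustCompile(`(\w+)="([^"]*)"`)

func main() {
	requests := map[string]map[string]string{}
	for _, line := range lines {
		fields := map[string]string{}
		for _, m := range kv.FindAllStringSubmatch(line, -1) {
			fields[m[1]] = m[2]
		}
		id := fields["id"]
		if resp, ok := fields["response"]; ok {
			// Response record: join it with the request seen earlier under the same id.
			req := requests[id]
			fmt.Printf("%s %s by %s -> %s\n", req["method"], req["uri"], req["user"], resp)
			continue
		}
		requests[id] = fields
	}
}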
I0125 05:13:00.919056 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (18.04133ms) I0125 05:13:00.919110 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj in state Running, deletion time 2017-01-25 05:13:30.806265384 -0500 EST I0125 05:13:00.919135 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (49.967µs) I0125 05:13:00.919299 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-1. Desired pod count change: 0->0 I0125 05:13:00.919358 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj in state Running, deletion time 2017-01-25 05:13:30.806265384 -0500 EST I0125 05:13:00.919377 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (48.669µs) I0125 05:13:00.919732 4678 audit.go:45] 2017-01-25T05:13:00.919718589-05:00 AUDIT: id="90d8da0b-13a1-4d15-a319-acc7dbb2b3b3" response="200" I0125 05:13:00.919855 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (16.580783ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:13:00.920220 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 4) I0125 05:13:00.921136 4678 audit.go:125] 2017-01-25T05:13:00.92109682-05:00 AUDIT: id="257fc1e8-e8d9-4aa5-8a69-bcba86d1cb42" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:13:00.923098 4678 store.go:283] GuaranteedUpdate of openshift.io/deploymentconfigs/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master failed because of a conflict, going to retry I0125 05:13:00.923326 4678 audit.go:45] 2017-01-25T05:13:00.923313916-05:00 AUDIT: id="257fc1e8-e8d9-4aa5-8a69-bcba86d1cb42" response="409" I0125 05:13:00.923387 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (2.537787ms) 409 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:13:00.923668 4678 controller.go:294] Cannot update the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master": Operation cannot be fulfilled on deploymentconfigs "postgresql-master": the object has been modified; please apply your changes to the latest version and try again I0125 05:13:00.923695 4678 controller.go:393] Error syncing deployment config extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master: Operation cannot be fulfilled on deploymentconfigs "postgresql-master": the object has been modified; please apply your changes to the latest version and try again I0125 05:13:00.928717 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:00.956271 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:13:00.964232 4678 iptables.go:362] 
running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:00.980253 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?fieldSelector=metadata.name%3Dpostgresql-master-1&resourceVersion=11119: (157.335458ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:00.983622 4678 audit.go:125] 2017-01-25T05:13:00.983553801-05:00 AUDIT: id="55673133-266b-43d2-a30d-059d2f84e5c7" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:13:00.985106 4678 audit.go:45] 2017-01-25T05:13:00.985090792-05:00 AUDIT: id="55673133-266b-43d2-a30d-059d2f84e5c7" response="200" I0125 05:13:00.985575 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (5.075334ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:00.994687 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:01.014006 4678 audit.go:125] 2017-01-25T05:13:01.01393937-05:00 AUDIT: id="51fcb384-ad88-42fc-b899-e75144135e85" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-1%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master" I0125 05:13:01.015833 4678 audit.go:125] 2017-01-25T05:13:01.01580287-05:00 AUDIT: id="06daa888-c34d-412d-b5f8-d5f267801a53" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:13:01.017025 4678 audit.go:45] 2017-01-25T05:13:01.017011669-05:00 AUDIT: id="06daa888-c34d-412d-b5f8-d5f267801a53" response="200" I0125 05:13:01.017304 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (6.846791ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:01.017868 4678 audit.go:45] 2017-01-25T05:13:01.017856537-05:00 AUDIT: id="51fcb384-ad88-42fc-b899-e75144135e85" response="200" I0125 05:13:01.018375 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-1%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master: (10.088109ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:01.022871 4678 audit.go:125] 2017-01-25T05:13:01.022825531-05:00 AUDIT: id="2dc67252-1613-4749-ab6e-65a198d1cfb6" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-1%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master&resourceVersion=11129" I0125 05:13:01.023417 4678 audit.go:45] 2017-01-25T05:13:01.023402294-05:00 AUDIT: id="2dc67252-1613-4749-ab6e-65a198d1cfb6" response="200" I0125 05:13:01.024410 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:01.043746 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:01.062583 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:01.081859 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:01.101781 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A 
KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:01.101816 4678 iptables.go:339] running 
iptables-restore [--noflush --counters] I0125 05:13:01.135149 4678 proxier.go:797] syncProxyRules took 300.368917ms I0125 05:13:01.135188 4678 proxier.go:566] OnEndpointsUpdate took 300.508517ms for 6 endpoints I0125 05:13:01.135266 4678 proxier.go:381] Received update notice: [] I0125 05:13:01.135302 4678 proxier.go:804] Syncing iptables rules I0125 05:13:01.135312 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:01.154581 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:01.173098 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:01.195027 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:01.214219 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:01.234174 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:01.247047 4678 generic.go:145] GenericPLEG: b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d: running -> exited I0125 05:13:01.247106 4678 generic.go:145] GenericPLEG: b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f: running -> exited I0125 05:13:01.247449 4678 docker_manager.go:1577] Container "0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f postgresql-master extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj" exited after 437.338549ms I0125 05:13:01.248082 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11018", FieldPath:"spec.containers{postgresql-master}"}): type: 'Normal' reason: 'Killing' Killing container with docker id 0b1e46b84aab: Need to kill pod. 
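The proxier records above (KUBE-SERVICES dispatching to KUBE-SVC-* chains, which jump to KUBE-SEP-* chains, finishing with "syncProxyRules took ...") show the shape of the ruleset kube-proxy regenerates on every endpoints change: one chain per service cluster IP/port, one chain per backend endpoint that marks hairpin traffic for SNAT and DNATs to the pod IP. The generator below is a rough, hypothetical sketch of that structure only (emitRules and the service/endpoint types are mine; chain-name hashing, session affinity, and nodeports are omitted), not the real pkg/proxy/iptables code.

package main

import "fmt"

type service struct {
	name      string // e.g. "default/kubernetes:https"
	clusterIP string
	port      int
	svcChain  string // e.g. "KUBE-SVC-NPX46M4PTMTKRN6Y"
}

type endpoint struct {
	ip       string
	port     int
	sepChain string // e.g. "KUBE-SEP-RTHE7RQVZQLKCHSP"
}

// emitRules prints iptables-restore style rules in the same shape as the
// proxier's sync: KUBE-SERVICES matches the cluster IP/port and jumps to the
// per-service chain, which jumps to a per-endpoint chain that masquerade-marks
// traffic from the endpoint itself and DNATs to the pod IP and port.
func emitRules(svc service, eps []endpoint) {
	fmt.Printf(":%s - [0:0]\n", svc.svcChain)
	fmt.Printf("-A KUBE-SERVICES -m comment --comment %q -m tcp -p tcp -d %s/32 --dport %d -j %s\n",
		svc.name+" cluster IP", svc.clusterIP, svc.port, svc.svcChain)
	for _, ep := range eps {
		fmt.Printf(":%s - [0:0]\n", ep.sepChain)
		fmt.Printf("-A %s -m comment --comment %s -j %s\n", svc.svcChain, svc.name, ep.sepChain)
		fmt.Printf("-A %s -m comment --comment %s -s %s/32 -j KUBE-MARK-MASQ\n", ep.sepChain, svc.name, ep.ip)
		fmt.Printf("-A %s -m comment --comment %s -m tcp -p tcp -j DNAT --to-destination %s:%d\n",
			ep.sepChain, svc.name, ep.ip, ep.port)
	}
}

func main() {
	emitRules(
		service{name: "default/kubernetes:https", clusterIP: "172.30.0.1", port: 443, svcChain: "KUBE-SVC-NPX46M4PTMTKRN6Y"},
		[]endpoint{{ip: "172.18.7.222", port: 8443, sepChain: "KUBE-SEP-RTHE7RQVZQLKCHSP"}},
	)
}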
I0125 05:13:01.248904 4678 audit.go:125] 2017-01-25T05:13:01.248848162-05:00 AUDIT: id="5f4118a0-8440-40a7-8e34-f46a31a6f9b2" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:13:01.252595 4678 audit.go:45] 2017-01-25T05:13:01.25257523-05:00 AUDIT: id="5f4118a0-8440-40a7-8e34-f46a31a6f9b2" response="201" I0125 05:13:01.252677 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (4.133891ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:01.260573 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:13:01.261724 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:01.272267 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc436830f20 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/2f5e0764 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc431158ea0 NetworkSettings:0xc434b16f00} I0125 05:13:01.273117 4678 docker_manager.go:1536] Killing container "bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj" with 30 second grace period I0125 05:13:01.281018 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc436831a20 Mounts:[] Config:0xc431159c20 NetworkSettings:0xc434b17100} I0125 05:13:01.283024 4678 generic.go:342] PLEG: Write status for postgresql-slave-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-slave-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.2", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc432f0a540), (*container.ContainerStatus)(0xc432f0a700)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:13:01.283165 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d"} I0125 05:13:01.283258 4678 kubelet_pods.go:1029] Generating status for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.283327 4678 helpers.go:78] Already ran container "deployment" of pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)", do nothing I0125 05:13:01.283492 4678 volume_manager.go:336] Waiting 
for volumes to attach and mount for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.284321 4678 audit.go:125] 2017-01-25T05:13:01.284283902-05:00 AUDIT: id="fdb54889-264a-4829-b773-4b76d53c2f42" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy" I0125 05:13:01.285857 4678 audit.go:45] 2017-01-25T05:13:01.285841177-05:00 AUDIT: id="fdb54889-264a-4829-b773-4b76d53c2f42" response="200" I0125 05:13:01.285963 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy: (1.97245ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:01.287058 4678 audit.go:125] 2017-01-25T05:13:01.287015395-05:00 AUDIT: id="c8661a62-02ad-4536-ae6c-5f350874afb8" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy/status" I0125 05:13:01.288227 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:01.311034 4678 audit.go:45] 2017-01-25T05:13:01.311010823-05:00 AUDIT: id="c8661a62-02ad-4536-ae6c-5f350874afb8" response="200" I0125 05:13:01.311167 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy/status: (24.379357ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:01.312380 4678 replication_controller.go:378] Pod postgresql-slave-1-deploy updated, objectMeta {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11048 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11131 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-slave-1] OwnerReferences:[] Finalizers:[] ClusterName:}. 
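The "GenericPLEG: <uid>/<containerID>: running -> exited" records and the SyncLoop (PLEG) ContainerDied events above come from the kubelet's relist-based pod lifecycle event generator: it periodically lists container states and emits an event for every transition observed between two relists. A toy model of that diff, assuming a plain containerID-to-state map rather than the kubelet's real pleg types, follows.

package main

import "fmt"

type state string

const (
	running state = "running"
	exited  state = "exited"
)

// diff compares two relist snapshots (container ID -> state) and reports the
// lifecycle events implied by each transition, the way a relist-based PLEG does.
func diff(prev, cur map[string]state) []string {
	var events []string
	for id, was := range prev {
		if was == running && cur[id] != running {
			events = append(events, "ContainerDied "+id)
		}
	}
	return events
}

func main() {
	prev := map[string]state{
		"269d8959d1b2": running, // postgresql-slave-1-deploy / deployment
		"0b1e46b84aab": running, // postgresql-master-1-6jfgj / postgresql-master
	}
	cur := map[string]state{
		"269d8959d1b2": exited,
		"0b1e46b84aab": exited,
	}
	for _, e := range diff(prev, cur) {
		fmt.Println(e)
	}
}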
I0125 05:13:01.312511 4678 replication_controller.go:255] No controllers found for pod postgresql-slave-1-deploy, replication manager will avoid syncing I0125 05:13:01.312548 4678 replica_set.go:320] Pod postgresql-slave-1-deploy updated, objectMeta {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11048 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11131 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-slave-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:13:01.312627 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-deploy, ReplicaSet controller will avoid syncing I0125 05:13:01.312664 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-deploy, job controller will avoid syncing I0125 05:13:01.312691 4678 daemoncontroller.go:332] Pod postgresql-slave-1-deploy updated. I0125 05:13:01.312722 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-deploy, daemon set controller will avoid syncing I0125 05:13:01.312751 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-deploy" I0125 05:13:01.312767 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-deploy, PodDisruptionBudget controller will avoid syncing. 
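The run of "No controllers found / No ReplicaSets / No jobs / No daemon sets / No PodDisruptionBudgets found for pod ..." lines reflects the shared watch fan-out: every workload controller receives the same pod update and checks its own label selectors to decide whether the pod belongs to it before syncing. A minimal sketch of that selector check is below; the ownsPod helper is mine, and the literal selector is chosen to match the labelSelector visible in the deployer's pod list request at 05:13:01.014 above, so treat it as illustrative rather than authoritative.

package main

import "fmt"

// ownsPod reports whether every key/value in the controller's selector is
// present on the pod's labels, which is the basic match rule the workload
// controllers apply before deciding to sync for a pod event.
func ownsPod(selector, podLabels map[string]string) bool {
	for k, v := range selector {
		if podLabels[k] != v {
			return false
		}
	}
	return true
}

func main() {
	deployerPod := map[string]string{"openshift.io/deployer-pod-for.name": "postgresql-slave-1"}
	masterPod := map[string]string{
		"app":              "pg-replica-example",
		"deployment":       "postgresql-master-1",
		"deploymentconfig": "postgresql-master",
		"name":             "postgresql-master",
	}
	rcSelector := map[string]string{
		"deployment":       "postgresql-master-1",
		"deploymentconfig": "postgresql-master",
		"name":             "postgresql-master",
	}
	fmt.Println("rc postgresql-master-1 owns deployer pod:", ownsPod(rcSelector, deployerPod)) // false -> "will avoid syncing"
	fmt.Println("rc postgresql-master-1 owns master pod:  ", ownsPod(rcSelector, masterPod))   // true
}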
I0125 05:13:01.312772 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-deploy" I0125 05:13:01.312863 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-deploy, StatefulSet controller will avoid syncing I0125 05:13:01.313304 4678 status_manager.go:425] Status for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Succeeded Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason:PodCompleted Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935981 nsec:0 loc:0xa2479e0}} Reason:PodCompleted Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935916 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.2 StartTime:0xc430d2f780 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running: Terminated:0xc42fb16c40} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d}]} version:3 podName:postgresql-slave-1-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:13:01.313494 4678 config.go:281] Setting pods for source api I0125 05:13:01.314820 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.317802 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc430fdc160 Mounts:[{Name: Source:/tmp/openshift-extended-tests/persistent-volumes816894978/0000099920249 Destination:/var/lib/pgsql/data Driver: Mode: RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/containers/postgresql-master/88f8e310 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc435a259e0 NetworkSettings:0xc43224f200} I0125 05:13:01.321634 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:01.353110 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc430fdc9a0 Mounts:[] Config:0xc42ea04c60 NetworkSettings:0xc43224f500} I0125 05:13:01.353570 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] 
:KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A 
KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:01.353608 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:01.365837 4678 generic.go:342] PLEG: Write status for postgresql-master-1-6jfgj/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-1-6jfgj", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.7", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42ccdce00), (*container.ContainerStatus)(0xc42ccdd260)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:13:01.366035 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.366362 4678 status_manager.go:312] Ignoring same status for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:56 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:13:00 -0500 EST Reason: Message:} {Type:PodScheduled Status:True 
LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:56 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.4 StartTime:2017-01-25 05:12:56 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running:0xc4327bc840 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28}]} I0125 05:13:01.366503 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.366632 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f"} I0125 05:13:01.366708 4678 kubelet_pods.go:1029] Generating status for "postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8(b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.366881 4678 status_manager.go:402] Status for pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" is up-to-date; skipping I0125 05:13:01.373950 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:01.374349 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:01.375234 4678 audit.go:125] 2017-01-25T05:13:01.375176349-05:00 AUDIT: id="fe4e6cab-fd82-4fb8-ab06-917848df4eff" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:13:01.376106 4678 audit.go:125] 2017-01-25T05:13:01.376076902-05:00 AUDIT: id="fd368c42-9a86-47bd-993b-64d1ed32e2e1" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:13:01.377790 4678 audit.go:45] 2017-01-25T05:13:01.37777637-05:00 AUDIT: id="fd368c42-9a86-47bd-993b-64d1ed32e2e1" response="200" I0125 05:13:01.378022 4678 audit.go:45] 2017-01-25T05:13:01.378010096-05:00 AUDIT: id="fe4e6cab-fd82-4fb8-ab06-917848df4eff" response="200" I0125 05:13:01.378238 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (2.375721ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:01.378294 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (3.402517ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:01.378547 4678 secret.go:206] Received 
secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:13:01.378698 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:01.378813 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:01.378900 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:13:01.379032 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:01.379127 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:01.397284 4678 proxier.go:797] syncProxyRules took 261.972768ms I0125 05:13:01.397319 4678 proxier.go:431] OnServiceUpdate took 262.037108ms for 4 services E0125 05:13:01.481935 4678 fsHandler.go:121] failed to collect filesystem stats - rootDiskErr: , rootInodeErr: , extraDiskErr: du command failed on /var/lib/docker/containers/a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f with output stdout: , stderr: du: cannot access ‘/var/lib/docker/containers/a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f’: No such file or directory - exit status 1 I0125 05:13:01.487807 4678 docker_manager.go:1577] Container "bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj" exited after 214.663298ms I0125 05:13:01.583712 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.584497 4678 audit.go:125] 2017-01-25T05:13:01.5844586-05:00 AUDIT: id="06b09af7-a58e-46d7-a44f-c8a3f9d27856" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:13:01.585864 4678 audit.go:45] 2017-01-25T05:13:01.58585155-05:00 AUDIT: id="06b09af7-a58e-46d7-a44f-c8a3f9d27856" response="200" I0125 05:13:01.586089 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (1.865855ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:01.586256 4678 docker_manager.go:1938] Found pod infra container for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 
05:13:01.586311 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.586323 4678 helpers.go:78] Already ran container "deployment" of pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)", do nothing I0125 05:13:01.586345 4678 docker_manager.go:2086] Got container changes for pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[]} I0125 05:13:01.586374 4678 docker_manager.go:2093] Killing Infra Container for "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" because all other containers are dead. I0125 05:13:01.588337 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:13:01.589816 4678 docker_manager.go:1536] Killing container "764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy" with 10 second grace period I0125 05:13:01.667065 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.667918 4678 audit.go:125] 2017-01-25T05:13:01.667869052-05:00 AUDIT: id="bfa81366-8ae1-42f2-a427-9a35d8389d2d" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:13:01.669284 4678 audit.go:45] 2017-01-25T05:13:01.669267943-05:00 AUDIT: id="bfa81366-8ae1-42f2-a427-9a35d8389d2d" response="200" I0125 05:13:01.669506 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (1.927637ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:01.669749 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.669806 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:01.669818 4678 docker_manager.go:1999] pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" container "deployment" exists as 3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28 I0125 05:13:01.669883 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d InitFailed:false InitContainersToKeep:map[] 
ContainersToStart:map[] ContainersToKeep:map[2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d:-1 3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28:0]} I0125 05:13:01.684623 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:01.684805 4678 kubelet_pods.go:785] Killing unwanted pod "postgresql-slave-1-deploy" I0125 05:13:01.690971 4678 docker_manager.go:1536] Killing container "269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d /" with 10 second grace period I0125 05:13:01.694688 4678 docker_manager.go:1577] Container "269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d /" exited after 3.68936ms I0125 05:13:01.695169 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-slave-1-deploy", UID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"10970", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Killing' Killing container with docker id 269d8959d1b2: Need to kill pod. I0125 05:13:01.695721 4678 audit.go:125] 2017-01-25T05:13:01.695690534-05:00 AUDIT: id="23c21c42-d53e-4b2f-a615-a7c05936b9e9" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:13:01.698115 4678 audit.go:45] 2017-01-25T05:13:01.698104027-05:00 AUDIT: id="23c21c42-d53e-4b2f-a615-a7c05936b9e9" response="201" I0125 05:13:01.698170 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (2.682938ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:01.747967 4678 docker_manager.go:1577] Container "764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy" exited after 158.127848ms I0125 05:13:01.748357 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:13:01.748983 4678 kubelet_volumes.go:113] Orphaned pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" found, removing I0125 05:13:01.750854 4678 docker_manager.go:1536] Killing container "764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f /" with 10 second grace period I0125 05:13:01.751834 4678 docker_manager.go:1577] Container "764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f /" exited after 964.335µs W0125 05:13:01.751858 4678 docker_manager.go:1583] No ref for pod '"764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f /"' I0125 05:13:01.929702 4678 audit.go:125] 2017-01-25T05:13:01.929666424-05:00 AUDIT: id="795edc8f-d126-4314-aadd-2184267ba74a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:13:01.930793 4678 audit.go:45] 2017-01-25T05:13:01.930777539-05:00 AUDIT: id="795edc8f-d126-4314-aadd-2184267ba74a" response="200" I0125 05:13:01.930870 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.411822ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:01.973209 4678 iptables.go:362] running iptables -N [KUBE-MARK-DROP -t nat] I0125 05:13:01.992366 4678 iptables.go:362] running iptables -C [KUBE-MARK-DROP -t 
nat -j MARK --set-xmark 0x00008000/0x00008000] I0125 05:13:02.005341 4678 audit.go:125] 2017-01-25T05:13:02.005261477-05:00 AUDIT: id="14106d2c-b0dc-4ca0-bdc8-0dbcce075aad" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:13:02.006911 4678 audit.go:45] 2017-01-25T05:13:02.00689415-05:00 AUDIT: id="14106d2c-b0dc-4ca0-bdc8-0dbcce075aad" response="200" I0125 05:13:02.007226 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (4.665329ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:02.016137 4678 iptables.go:362] running iptables -N [KUBE-FIREWALL -t filter] I0125 05:13:02.035434 4678 iptables.go:362] running iptables -C [KUBE-FIREWALL -t filter -m comment --comment kubernetes firewall for dropping marked packets -m mark --mark 0x00008000/0x00008000 -j DROP] I0125 05:13:02.054137 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -j KUBE-FIREWALL] I0125 05:13:02.072809 4678 iptables.go:362] running iptables -C [INPUT -t filter -j KUBE-FIREWALL] I0125 05:13:02.097782 4678 iptables.go:362] running iptables -N [KUBE-MARK-MASQ -t nat] I0125 05:13:02.109419 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.7, Port: 5432, Timeout: 1s I0125 05:13:02.110295 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD X5NgRSrwacHP }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc4269cff50 0xc4269cff80 /dev/termination-log IfNotPresent 0xc4269cffb0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:02.113053 4678 exec.go:38] Exec probe response: "" I0125 05:13:02.113081 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD X5NgRSrwacHP }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc4269cff50 0xc4269cff80 /dev/termination-log IfNotPresent 0xc4269cffb0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:02.115044 4678 exec.go:38] Exec probe response: "" I0125 05:13:02.115065 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } 
{POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD X5NgRSrwacHP }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc4269cff50 0xc4269cff80 /dev/termination-log IfNotPresent 0xc4269cffb0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:02.116868 4678 exec.go:38] Exec probe response: "" W0125 05:13:02.116887 4678 prober.go:98] No ref for container "docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f" (postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master) I0125 05:13:02.116896 4678 prober.go:101] Readiness probe for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" errored: container not running (0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f) I0125 05:13:02.119460 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:02.138343 4678 iptables.go:362] running iptables -C [KUBE-MARK-MASQ -t nat -j MARK --set-xmark 0x00004000/0x00004000] I0125 05:13:02.159980 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:02.180929 4678 iptables.go:362] running iptables -C [KUBE-POSTROUTING -t nat -m comment --comment kubernetes service traffic requiring SNAT -m mark --mark 0x00004000/0x00004000 -j MASQUERADE] I0125 05:13:02.190154 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/secret/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") from pod "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:02.190357 4678 util.go:340] Tearing down volume deployer-token-r7jj8 for pod b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:02.190712 4678 mount_linux.go:147] Unmounting /mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:02.215853 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (OuterVolumeSpecName: "deployer-token-r7jj8") pod "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "deployer-token-r7jj8". 
PluginName "kubernetes.io/secret", VolumeGidValue "" I0125 05:13:02.371102 4678 generic.go:145] GenericPLEG: b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f: running -> exited I0125 05:13:02.371142 4678 generic.go:145] GenericPLEG: b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741: running -> exited I0125 05:13:02.376992 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42c59a420 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/2f5e0764 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate}] Config:0xc4359d6240 NetworkSettings:0xc42b060900} I0125 05:13:02.380643 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42c59a9a0 Mounts:[] Config:0xc4359d66c0 NetworkSettings:0xc42b060d00} I0125 05:13:02.382275 4678 generic.go:342] PLEG: Write status for postgresql-slave-1-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-slave-1-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42ca30620), (*container.ContainerStatus)(0xc42ca307e0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:13:02.382405 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f"} I0125 05:13:02.388083 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42c59b080 Mounts:[{Name: Source:/tmp/openshift-extended-tests/persistent-volumes816894978/0000099920249 Destination:/var/lib/pgsql/data Driver: Mode: RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/containers/postgresql-master/88f8e310 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42f81e900 NetworkSettings:0xc42b060f00} I0125 05:13:02.391995 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc43117cdc0 Mounts:[] Config:0xc430f28900 NetworkSettings:0xc435c21000} I0125 05:13:02.393577 4678 generic.go:342] PLEG: Write status for postgresql-master-1-6jfgj/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-1-6jfgj", 
Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42ca30d20), (*container.ContainerStatus)(0xc42ca30fc0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:13:02.393644 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.393646 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741"} I0125 05:13:02.394424 4678 audit.go:125] 2017-01-25T05:13:02.39438824-05:00 AUDIT: id="ab75eee7-8785-4016-a4d0-5215cbecb18b" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:13:02.395867 4678 audit.go:45] 2017-01-25T05:13:02.395854589-05:00 AUDIT: id="ab75eee7-8785-4016-a4d0-5215cbecb18b" response="200" I0125 05:13:02.395972 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (1.821129ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:02.396878 4678 audit.go:125] 2017-01-25T05:13:02.396851282-05:00 AUDIT: id="379d3227-57b8-4712-8967-36bd9c11a3a5" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/status" I0125 05:13:02.398604 4678 audit.go:45] 2017-01-25T05:13:02.398590018-05:00 AUDIT: id="379d3227-57b8-4712-8967-36bd9c11a3a5" response="200" I0125 05:13:02.398689 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj/status: (2.023473ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:02.399024 4678 status_manager.go:425] Status for pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql-master]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc4283337a0 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running: Terminated:0xc43219afc0} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 
ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f}]} version:5 podName:postgresql-master-1-6jfgj podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:13:02.399085 4678 status_manager.go:441] Removing Pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" from etcd I0125 05:13:02.399406 4678 config.go:281] Setting pods for source api I0125 05:13:02.400020 4678 audit.go:125] 2017-01-25T05:13:02.399983119-05:00 AUDIT: id="2f63c147-4ea9-4c32-a466-c3b5f0c2bb0c" ip="172.18.7.222" method="DELETE" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:13:02.400741 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.400978 4678 replication_controller.go:378] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11123 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc4355d1908 Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-1] Annotations:map[openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11133 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc4288249d0 Labels:map[deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master 
openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:13:02.401071 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:13:30.806265384 -0500 EST, labels map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-1]. I0125 05:13:02.401187 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj in state Running, deletion time 2017-01-25 05:13:30.806265384 -0500 EST I0125 05:13:02.401234 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (76.819µs) I0125 05:13:02.401269 4678 replica_set.go:320] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11123 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc4355d1908 Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-1] Annotations:map[openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11133 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc4288249d0 Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-1] Annotations:map[openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:13:02.401351 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:13:30.806265384 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-1-6jfgj", GenerateName:"postgresql-master-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11133", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:89557223, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42b7c54a0), DeletionGracePeriodSeconds:(*int64)(0xc4288249d0), Labels:map[string]string{"deployment":"postgresql-master-1", "deploymentconfig":"postgresql-master", "name":"postgresql-master", "app":"pg-replica-example"}, Annotations:map[string]string{"openshift.io/generated-by":"OpenShiftNewApp", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-1\",\"uid\":\"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11016\"}}\n", "openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1", "openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc42b7c55e0), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc433fb6540), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), 
RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"X5NgRSrwacHP", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc433fb65d0), ReadinessProbe:(*api.Probe)(0xc433fb6600), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc433fb6630), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc428824b40), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc4367a1800), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql-master]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, 
LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc42b7c5860), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-master", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc42f4318f0)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f"}}}}. I0125 05:13:02.401651 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-6jfgj, ReplicaSet controller will avoid syncing I0125 05:13:02.401690 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-6jfgj, job controller will avoid syncing I0125 05:13:02.401711 4678 daemoncontroller.go:332] Pod postgresql-master-1-6jfgj updated. I0125 05:13:02.401746 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-6jfgj, daemon set controller will avoid syncing I0125 05:13:02.401774 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-6jfgj" I0125 05:13:02.401793 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-6jfgj, PodDisruptionBudget controller will avoid syncing. 
I0125 05:13:02.401799 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-6jfgj" I0125 05:13:02.402416 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-6jfgj, StatefulSet controller will avoid syncing I0125 05:13:02.403777 4678 config.go:281] Setting pods for source api I0125 05:13:02.405005 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.405232 4678 replication_controller.go:378] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11133 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc4288249d0 Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11134 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:00.806265384 -0500 EST DeletionGracePeriodSeconds:0xc42dd13110 Labels:map[deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/generated-by:OpenShiftNewApp kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:13:02.405317 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:13:00.806265384 -0500 EST, labels map[deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example]. 
I0125 05:13:02.405406 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj in state Running, deletion time 2017-01-25 05:13:00.806265384 -0500 EST I0125 05:13:02.405429 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (46.423µs) I0125 05:13:02.405451 4678 replica_set.go:320] Pod postgresql-master-1-6jfgj updated, objectMeta {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11133 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:30.806265384 -0500 EST DeletionGracePeriodSeconds:0xc4288249d0 Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-1-6jfgj GenerateName:postgresql-master-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj UID:b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11134 Generation:0 CreationTimestamp:2017-01-25 05:12:02.089557223 -0500 EST DeletionTimestamp:2017-01-25 05:13:00.806265384 -0500 EST DeletionGracePeriodSeconds:0xc42dd13110 Labels:map[app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-1","uid":"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11016"}} openshift.io/scc:restricted openshift.io/deployment-config.latest-version:1 openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:13:02.405520 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:13:00.806265384 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-1-6jfgj", GenerateName:"postgresql-master-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11134", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:89557223, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc437e3a580), DeletionGracePeriodSeconds:(*int64)(0xc42dd13110), Labels:map[string]string{"deploymentconfig":"postgresql-master", "name":"postgresql-master", "app":"pg-replica-example", "deployment":"postgresql-master-1"}, Annotations:map[string]string{"openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1", "openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-1", "openshift.io/generated-by":"OpenShiftNewApp", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-1\",\"uid\":\"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11016\"}}\n"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc437e3a6c0), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc4312faa20), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), 
RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"X5NgRSrwacHP", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc4312faab0), ReadinessProbe:(*api.Probe)(0xc4312faae0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc4312fab10), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc42dd134e0), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc42db54500), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql-master]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, 
LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc437e3a960), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-master", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc431f34000)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f"}}}}. I0125 05:13:02.405818 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-6jfgj, ReplicaSet controller will avoid syncing I0125 05:13:02.405847 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-6jfgj, job controller will avoid syncing I0125 05:13:02.405858 4678 daemoncontroller.go:332] Pod postgresql-master-1-6jfgj updated. I0125 05:13:02.405886 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-6jfgj, daemon set controller will avoid syncing I0125 05:13:02.405906 4678 disruption.go:326] updatePod called on pod "postgresql-master-1-6jfgj" I0125 05:13:02.405922 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-6jfgj, PodDisruptionBudget controller will avoid syncing. 
I0125 05:13:02.405928 4678 disruption.go:329] No matching pdb for pod "postgresql-master-1-6jfgj" I0125 05:13:02.405987 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-6jfgj, StatefulSet controller will avoid syncing I0125 05:13:02.406453 4678 audit.go:125] 2017-01-25T05:13:02.406416866-05:00 AUDIT: id="82485081-a9d5-4bb2-94fc-071d49834bad" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:02.407435 4678 config.go:281] Setting pods for source api I0125 05:13:02.407512 4678 audit.go:45] 2017-01-25T05:13:02.407498132-05:00 AUDIT: id="2f63c147-4ea9-4c32-a466-c3b5f0c2bb0c" response="200" I0125 05:13:02.407601 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (7.859309ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:02.407863 4678 status_manager.go:443] Pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" fully terminated and removed from etcd I0125 05:13:02.408034 4678 audit.go:45] 2017-01-25T05:13:02.408020662-05:00 AUDIT: id="82485081-a9d5-4bb2-94fc-071d49834bad" response="200" I0125 05:13:02.408090 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (5.178459ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:02.408423 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:13:02.408433 4678 kubelet.go:1791] SyncLoop (REMOVE, "api"): "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.408670 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.deletePod)-fm, timestamp 2017-01-25 05:13:00.806265384 -0500 EST, labels map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-1 deploymentconfig:postgresql-master]. 
I0125 05:13:02.408770 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (29.137µs) I0125 05:13:02.408795 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.deletePod)-fm, timestamp 2017-01-25 05:13:00.806265384 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-1-6jfgj", GenerateName:"postgresql-master-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj", UID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11135", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:89557223, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc435f09720), DeletionGracePeriodSeconds:(*int64)(0xc431357b90), Labels:map[string]string{"name":"postgresql-master", "app":"pg-replica-example", "deployment":"postgresql-master-1", "deploymentconfig":"postgresql-master"}, Annotations:map[string]string{"openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-1", "openshift.io/generated-by":"OpenShiftNewApp", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-1\",\"uid\":\"b383709d-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11016\"}}\n", "openshift.io/scc":"restricted", "openshift.io/deployment-config.latest-version":"1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc435f09860), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), 
GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc432e9e750), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"X5NgRSrwacHP", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc432e9e7e0), ReadinessProbe:(*api.Probe)(0xc432e9e810), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc432e9e840), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc431357d30), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc427259900), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", 
Message:"containers with unready status: [postgresql-master]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc435f09ae0), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-master", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc434948460)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f"}}}}. I0125 05:13:02.409068 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-1-6jfgj, ReplicaSet controller will avoid syncing I0125 05:13:02.409090 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-1-6jfgj, job controller will avoid syncing I0125 05:13:02.409100 4678 daemoncontroller.go:367] Pod postgresql-master-1-6jfgj deleted. I0125 05:13:02.409125 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-1-6jfgj, daemon set controller will avoid syncing I0125 05:13:02.409139 4678 disruption.go:355] deletePod called on pod "postgresql-master-1-6jfgj" I0125 05:13:02.409153 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-1-6jfgj, PodDisruptionBudget controller will avoid syncing. I0125 05:13:02.409158 4678 disruption.go:358] No matching pdb for pod "postgresql-master-1-6jfgj" I0125 05:13:02.410692 4678 audit.go:125] 2017-01-25T05:13:02.410655671-05:00 AUDIT: id="420b01b0-7eac-42ba-9623-fe7607feeb96" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:02.411317 4678 audit.go:45] 2017-01-25T05:13:02.411302582-05:00 AUDIT: id="420b01b0-7eac-42ba-9623-fe7607feeb96" response="200" I0125 05:13:02.411372 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (2.571876ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:02.411630 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (9.766836ms) I0125 05:13:02.412233 4678 pet_set.go:210] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1-6jfgj deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.(*StatefulSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.deletePod)-fm. 
I0125 05:13:02.412263 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-1-6jfgj, StatefulSet controller will avoid syncing I0125 05:13:02.413549 4678 kubelet.go:1976] Failed to delete pod "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)", err: pod not found I0125 05:13:02.414332 4678 audit.go:125] 2017-01-25T05:13:02.414306069-05:00 AUDIT: id="b6c35128-3b2e-4803-84cd-f45ecdaad279" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:02.415364 4678 audit.go:45] 2017-01-25T05:13:02.415350791-05:00 AUDIT: id="b6c35128-3b2e-4803-84cd-f45ecdaad279" response="200" I0125 05:13:02.415423 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (3.505108ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:02.416359 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:13:02.417378 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-1%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master&resourceVersion=11129: (1.396496286s) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:02.419281 4678 audit.go:125] 2017-01-25T05:13:02.419243688-05:00 AUDIT: id="0869a84d-857e-4804-9023-794aa00a526e" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:13:02.419936 4678 audit.go:125] 2017-01-25T05:13:02.419900996-05:00 AUDIT: id="68a179d2-9c25-4e63-ba47-47189bb021cd" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:02.420392 4678 audit.go:45] 2017-01-25T05:13:02.420378157-05:00 AUDIT: id="0869a84d-857e-4804-9023-794aa00a526e" response="200" I0125 05:13:02.420784 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (3.282671ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:02.421469 4678 audit.go:45] 2017-01-25T05:13:02.421454871-05:00 AUDIT: id="68a179d2-9c25-4e63-ba47-47189bb021cd" response="200" I0125 05:13:02.421521 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (3.421757ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:02.421754 4678 endpoints_controller.go:334] Finished syncing service 
"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (10.094109ms) I0125 05:13:02.421889 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:13:02.422011 4678 proxier.go:804] Syncing iptables rules I0125 05:13:02.422035 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:02.431646 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:13:02.431777 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:02.431794 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:02.431812 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:13:02.431825 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:02.431834 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:02.431842 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:02.431850 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:02.436066 4678 audit.go:125] 2017-01-25T05:13:02.436013421-05:00 AUDIT: id="9ad28763-ef3d-4fdc-a847-e43fc8f64551" ip="172.17.0.4" method="PUT" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:13:02.440562 4678 audit.go:45] 2017-01-25T05:13:02.440542146-05:00 AUDIT: id="9ad28763-ef3d-4fdc-a847-e43fc8f64551" response="200" I0125 05:13:02.442044 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (9.014564ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:02.442606 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 0->1 I0125 05:13:02.442656 4678 controller_utils.go:158] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 either never recorded expectations, or the ttl expired. I0125 05:13:02.442699 4678 controller_utils.go:175] Setting expectations &controller.ControlleeExpectations{add:1, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2", timestamp:time.Time{sec:63620935982, nsec:442696260, loc:(*time.Location)(0xa2479e0)}} I0125 05:13:02.442731 4678 replication_controller.go:541] Too few "extended-test-postgresql-replication-1-34bbd-xd4g8"/"postgresql-master-2" replicas, need 1, creating 1 I0125 05:13:02.444262 4678 factory.go:154] Replication controller "postgresql-master-2" updated. 
I0125 05:13:02.447613 4678 audit.go:125] 2017-01-25T05:13:02.447567704-05:00 AUDIT: id="ebfb51d4-8ae8-485f-843d-0dad0e0f95ca" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:13:02.448725 4678 audit.go:125] 2017-01-25T05:13:02.44870155-05:00 AUDIT: id="8257fa8f-1a96-4559-9e47-e139dcc2f0e4" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/images/sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389" I0125 05:13:02.449095 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:02.458662 4678 audit.go:125] 2017-01-25T05:13:02.458562873-05:00 AUDIT: id="e7f1b514-3a49-4eb3-bb0e-8ff4069282dc" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:13:02.467221 4678 audit.go:45] 2017-01-25T05:13:02.467186242-05:00 AUDIT: id="8257fa8f-1a96-4559-9e47-e139dcc2f0e4" response="200" I0125 05:13:02.467989 4678 panics.go:76] GET /oapi/v1/images/sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389: (19.447988ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:13:02.469023 4678 audit.go:45] 2017-01-25T05:13:02.469010883-05:00 AUDIT: id="e7f1b514-3a49-4eb3-bb0e-8ff4069282dc" response="200" I0125 05:13:02.469146 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (24.046844ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:13:02.470135 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 4) I0125 05:13:02.470880 4678 admission.go:77] getting security context constraints for pod (generate: postgresql-master-2-) in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 with user info &{system:serviceaccount:openshift-infra:replication-controller cab49cde-e2d9-11e6-a4b0-0e6a5cbf0094 [system:serviceaccounts system:serviceaccounts:openshift-infra system:authenticated] map[]} I0125 05:13:02.470914 4678 admission.go:88] getting security context constraints for pod (generate: postgresql-master-2-) with service account info &{system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:default [system:serviceaccounts system:serviceaccounts:extended-test-postgresql-replication-1-34bbd-xd4g8] map[]} I0125 05:13:02.471803 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:13:02.472446 4678 audit.go:125] 2017-01-25T05:13:02.472404271-05:00 AUDIT: id="ecc7eaca-50ce-4336-b7e1-fa5f88fb680d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:13:02.473980 4678 audit.go:125] 2017-01-25T05:13:02.473938092-05:00 AUDIT: id="516b5995-b212-4b7d-ae9d-51138f4912ca" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" 
namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:13:02.474270 4678 audit.go:45] 2017-01-25T05:13:02.474255788-05:00 AUDIT: id="ecc7eaca-50ce-4336-b7e1-fa5f88fb680d" response="200" I0125 05:13:02.474340 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (2.210196ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:02.474603 4678 matcher.go:297] got preallocated values for min: 1000640000, max: 1000649999 for uid range in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:13:02.474614 4678 matcher.go:310] got preallocated value for level: s0:c25,c20 for selinux options in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:13:02.474622 4678 matcher.go:340] got preallocated value for groups: 1000640000/10000 in namespace extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:13:02.474644 4678 admission.go:149] validating pod (generate: postgresql-master-2-) against providers restricted I0125 05:13:02.474702 4678 admission.go:116] pod (generate: postgresql-master-2-) validated against provider restricted I0125 05:13:02.475960 4678 audit.go:45] 2017-01-25T05:13:02.475946679-05:00 AUDIT: id="516b5995-b212-4b7d-ae9d-51138f4912ca" response="200" I0125 05:13:02.476387 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (12.292225ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:02.476577 4678 audit.go:45] 2017-01-25T05:13:02.47656361-05:00 AUDIT: id="ebfb51d4-8ae8-485f-843d-0dad0e0f95ca" response="201" I0125 05:13:02.476661 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (31.105505ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:02.477026 4678 factory.go:488] About to try and schedule pod postgresql-master-2-46j9k I0125 05:13:02.477034 4678 controller_utils.go:512] Controller postgresql-master-2 created pod postgresql-master-2-46j9k I0125 05:13:02.477038 4678 scheduler.go:93] Attempting to schedule pod: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k I0125 05:13:02.477087 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 0->0 (need 1), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 1->2 I0125 05:13:02.477259 4678 controller_utils.go:192] Lowered expectations &controller.ControlleeExpectations{add:0, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2", timestamp:time.Time{sec:63620935982, nsec:442696260, loc:(*time.Location)(0xa2479e0)}} I0125 05:13:02.477308 4678 replica_set.go:288] Pod postgresql-master-2-46j9k created: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-2-46j9k", GenerateName:"postgresql-master-2-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k", UID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11139", 
Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935982, nsec:474873911, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"name":"postgresql-master", "app":"pg-replica-example", "deployment":"postgresql-master-2", "deploymentconfig":"postgresql-master"}, Annotations:map[string]string{"openshift.io/scc":"restricted", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/deployment-config.latest-version":"2", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-2\",\"uid\":\"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11137\"}}\n", "openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-2"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc4356cd4a0), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42e535110), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", 
Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"newpass", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc42e5351a0), ReadinessProbe:(*api.Probe)(0xc42e5351d0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42e535200), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc43636c2c0), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"", SecurityContext:(*api.PodSecurityContext)(0xc42d3c4200), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Pending", Conditions:[]api.PodCondition(nil), Message:"", Reason:"", HostIP:"", PodIP:"", StartTime:(*unversioned.Time)(nil), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus(nil)}}. I0125 05:13:02.477552 4678 event.go:217] Event(api.ObjectReference{Kind:"ReplicationController", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-2", UID:"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11137", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: postgresql-master-2-46j9k I0125 05:13:02.477573 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-46j9k, ReplicaSet controller will avoid syncing I0125 05:13:02.477609 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-46j9k, job controller will avoid syncing I0125 05:13:02.477627 4678 daemoncontroller.go:309] Pod postgresql-master-2-46j9k added. I0125 05:13:02.477662 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-46j9k, daemon set controller will avoid syncing I0125 05:13:02.477682 4678 disruption.go:314] addPod called on pod "postgresql-master-2-46j9k" I0125 05:13:02.477702 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-46j9k, PodDisruptionBudget controller will avoid syncing. 
I0125 05:13:02.477708 4678 disruption.go:317] No matching pdb for pod "postgresql-master-2-46j9k" I0125 05:13:02.477928 4678 pet_set.go:160] Pod postgresql-master-2-46j9k created, labels: map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master] I0125 05:13:02.477953 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-46j9k, StatefulSet controller will avoid syncing I0125 05:13:02.478155 4678 factory.go:648] Attempting to bind postgresql-master-2-46j9k to 172.18.7.222 I0125 05:13:02.479365 4678 audit.go:125] 2017-01-25T05:13:02.479330384-05:00 AUDIT: id="f7746b6f-9b73-4ca0-9f12-7d13e50b33d8" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings" I0125 05:13:02.480852 4678 audit.go:125] 2017-01-25T05:13:02.480820434-05:00 AUDIT: id="0e9aa80e-765d-4be6-910a-91e87baa3773" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:13:02.481546 4678 audit.go:125] 2017-01-25T05:13:02.481511069-05:00 AUDIT: id="1e210d75-9c13-45fc-b6c7-4a574d1b16d1" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:13:02.482627 4678 audit.go:45] 2017-01-25T05:13:02.482613262-05:00 AUDIT: id="f7746b6f-9b73-4ca0-9f12-7d13e50b33d8" response="201" I0125 05:13:02.482682 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/bindings: (3.61375ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:02.483599 4678 replication_controller.go:378] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11139 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-2] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 
SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11140 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:13:02.483739 4678 replica_set.go:320] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11139 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master] Annotations:map[openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11140 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:13:02.483851 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-46j9k, ReplicaSet controller will avoid syncing I0125 05:13:02.483879 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-46j9k, job controller will avoid syncing I0125 05:13:02.483901 4678 daemoncontroller.go:332] Pod postgresql-master-2-46j9k updated. I0125 05:13:02.483931 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-46j9k, daemon set controller will avoid syncing I0125 05:13:02.483951 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-46j9k" I0125 05:13:02.483966 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-46j9k, PodDisruptionBudget controller will avoid syncing. I0125 05:13:02.483972 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-46j9k" I0125 05:13:02.484047 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-46j9k, StatefulSet controller will avoid syncing I0125 05:13:02.484264 4678 config.go:281] Setting pods for source api I0125 05:13:02.485024 4678 config.go:397] Receiving a new pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.485510 4678 kubelet.go:1781] SyncLoop (ADD, "api"): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.485765 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.486561 4678 audit.go:45] 2017-01-25T05:13:02.48654785-05:00 AUDIT: id="1e210d75-9c13-45fc-b6c7-4a574d1b16d1" response="200" I0125 05:13:02.487311 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:02.498361 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.498715 4678 audit.go:45] 2017-01-25T05:13:02.498655157-05:00 AUDIT: id="0e9aa80e-765d-4be6-910a-91e87baa3773" response="201" I0125 05:13:02.499175 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (20.690407ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:02.502093 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (24.147341ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:02.503433 4678 audit.go:125] 2017-01-25T05:13:02.503359686-05:00 AUDIT: id="d136f4d4-a28a-48b2-8c1b-72b3c38ed20f" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:13:02.504854 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (62.207101ms) I0125 05:13:02.505126 4678 replication_controller_utils.go:58] Updating replica count for rc: 
extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 0->1 (need 1), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 1->2 I0125 05:13:02.506243 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 1->1 I0125 05:13:02.507535 4678 factory.go:154] Replication controller "postgresql-master-2" updated. I0125 05:13:02.508330 4678 audit.go:125] 2017-01-25T05:13:02.508282733-05:00 AUDIT: id="a9a13b06-0382-4a2f-ae07-516431b7ec22" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:02.509335 4678 audit.go:125] 2017-01-25T05:13:02.509302711-05:00 AUDIT: id="75ec11ea-c7d5-4612-885f-990063540188" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k" I0125 05:13:02.514503 4678 audit.go:45] 2017-01-25T05:13:02.514484332-05:00 AUDIT: id="a9a13b06-0382-4a2f-ae07-516431b7ec22" response="200" I0125 05:13:02.514596 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (34.45656ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:02.515124 4678 audit.go:45] 2017-01-25T05:13:02.51511187-05:00 AUDIT: id="75ec11ea-c7d5-4612-885f-990063540188" response="200" I0125 05:13:02.515583 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:13:02.517721 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k: (8.703465ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:02.518736 4678 audit.go:125] 2017-01-25T05:13:02.518698955-05:00 AUDIT: id="ecb116ee-ee4a-46e9-9e17-3b2d4874deb4" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:13:02.519446 4678 audit.go:45] 2017-01-25T05:13:02.519435211-05:00 AUDIT: id="ecb116ee-ee4a-46e9-9e17-3b2d4874deb4" response="409" I0125 05:13:02.519493 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (6.73832ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:02.519635 4678 audit.go:125] 2017-01-25T05:13:02.519602834-05:00 AUDIT: id="3db8f0a8-cd3f-4233-8a2a-88705ce403ff" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?fieldSelector=metadata.name%3Dpostgresql-master-2&resourceVersion=11137" I0125 05:13:02.520044 4678 audit.go:45] 2017-01-25T05:13:02.520031772-05:00 AUDIT: id="3db8f0a8-cd3f-4233-8a2a-88705ce403ff" response="200" I0125 05:13:02.522560 4678 audit.go:125] 2017-01-25T05:13:02.522520779-05:00 AUDIT: id="3f22f7c5-d9db-44cd-a17b-fcddea10126e" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k/status" I0125 05:13:02.525237 4678 audit.go:45] 2017-01-25T05:13:02.525222521-05:00 AUDIT: id="d136f4d4-a28a-48b2-8c1b-72b3c38ed20f" response="201" I0125 05:13:02.525297 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (22.568492ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:02.525556 4678 audit.go:45] 2017-01-25T05:13:02.525543675-05:00 AUDIT: id="3f22f7c5-d9db-44cd-a17b-fcddea10126e" response="200" I0125 05:13:02.525637 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k/status: (3.336201ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:02.525784 4678 config.go:281] Setting pods for source api I0125 05:13:02.527116 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.527123 4678 status_manager.go:425] Status for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Pending Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql-master]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc429674340 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting:0xc429674320 Running: Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID: ContainerID:}]} version:1 podName:postgresql-master-2-46j9k podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:13:02.527306 4678 audit.go:125] 2017-01-25T05:13:02.52727088-05:00 AUDIT: id="3fe71103-36d8-4385-8e55-b41d9928ad3c" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:02.527371 4678 replication_controller.go:378] Pod postgresql-master-2-46j9k updated, objectMeta 
{Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11140 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11144 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:13:02.527514 4678 replica_set.go:320] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11140 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11144 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:13:02.527637 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-46j9k, ReplicaSet controller will avoid syncing I0125 05:13:02.527670 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-46j9k, job controller will avoid syncing I0125 05:13:02.527695 4678 daemoncontroller.go:332] Pod postgresql-master-2-46j9k updated. I0125 05:13:02.527725 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-46j9k, daemon set controller will avoid syncing I0125 05:13:02.527747 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-46j9k" I0125 05:13:02.527763 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-46j9k, PodDisruptionBudget controller will avoid syncing. 
I0125 05:13:02.527769 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-46j9k" I0125 05:13:02.527861 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-46j9k, StatefulSet controller will avoid syncing I0125 05:13:02.528714 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:02.540665 4678 audit.go:45] 2017-01-25T05:13:02.540644917-05:00 AUDIT: id="3fe71103-36d8-4385-8e55-b41d9928ad3c" response="200" I0125 05:13:02.540814 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (19.877873ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:02.541762 4678 audit.go:125] 2017-01-25T05:13:02.541714188-05:00 AUDIT: id="f342de8e-ba27-4042-9a1f-65c5274b1374" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:13:02.543585 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (65.811772ms) I0125 05:13:02.544841 4678 audit.go:45] 2017-01-25T05:13:02.544818241-05:00 AUDIT: id="f342de8e-ba27-4042-9a1f-65c5274b1374" response="200" I0125 05:13:02.545130 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (21.367195ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:02.545475 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 0->1 (need 1), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 2->2 I0125 05:13:02.549437 4678 audit.go:125] 2017-01-25T05:13:02.549398829-05:00 AUDIT: id="8f7894c2-6add-4450-a9c8-57c63d2aac0f" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:13:02.550638 4678 audit.go:125] 2017-01-25T05:13:02.550592421-05:00 AUDIT: id="90f5c61f-d610-42b8-af3b-68bfff5b7c5e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:02.553391 4678 audit.go:45] 2017-01-25T05:13:02.553376232-05:00 AUDIT: id="90f5c61f-d610-42b8-af3b-68bfff5b7c5e" response="200" I0125 05:13:02.553503 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (9.510998ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:02.553710 4678 audit.go:45] 2017-01-25T05:13:02.553699008-05:00 AUDIT: 
id="8f7894c2-6add-4450-a9c8-57c63d2aac0f" response="200" I0125 05:13:02.555338 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (8.326665ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:02.555578 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:02.565963 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:13:02.567603 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (62.594011ms) I0125 05:13:02.567912 4678 factory.go:154] Replication controller "postgresql-master-2" updated. I0125 05:13:02.567973 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 0->1 (need 1), fullyLabeledReplicas 0->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 2->2 I0125 05:13:02.568914 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 1->1 I0125 05:13:02.568948 4678 replication_controller.go:338] Observed updated replica count for rc: postgresql-master-2, 0->1 I0125 05:13:02.569988 4678 audit.go:125] 2017-01-25T05:13:02.569940036-05:00 AUDIT: id="f5e9be3c-f2e5-44d2-9580-ff24a0b1c390" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:13:02.572440 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?fieldSelector=metadata.name%3Dpostgresql-master-2&resourceVersion=11137: (65.91174ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:02.578018 4678 audit.go:45] 2017-01-25T05:13:02.577997316-05:00 AUDIT: id="f5e9be3c-f2e5-44d2-9580-ff24a0b1c390" response="200" I0125 05:13:02.578151 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (8.525384ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:13:02.580558 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 4) I0125 05:13:02.581459 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:13:02.581798 4678 audit.go:125] 2017-01-25T05:13:02.581760052-05:00 AUDIT: id="13a89484-1e2f-4126-8bc5-dafbfd0ad5e0" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:13:02.582566 4678 audit.go:45] 2017-01-25T05:13:02.582554634-05:00 AUDIT: id="13a89484-1e2f-4126-8bc5-dafbfd0ad5e0" response="409" I0125 05:13:02.582623 4678 panics.go:76] PUT 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (8.93721ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:02.582751 4678 audit.go:125] 2017-01-25T05:13:02.582718187-05:00 AUDIT: id="48648b7e-38d1-41ff-b8dc-cc662360d694" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:13:02.583159 4678 audit.go:125] 2017-01-25T05:13:02.58313007-05:00 AUDIT: id="e5a54da1-9e90-4cae-a749-60fb3b5594de" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:02.585633 4678 audit.go:45] 2017-01-25T05:13:02.585618614-05:00 AUDIT: id="48648b7e-38d1-41ff-b8dc-cc662360d694" response="200" I0125 05:13:02.586049 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (13.452875ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:02.586646 4678 audit.go:45] 2017-01-25T05:13:02.586632363-05:00 AUDIT: id="e5a54da1-9e90-4cae-a749-60fb3b5594de" response="200" I0125 05:13:02.586705 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (13.567892ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:02.587556 4678 config.go:165] Endpoints handler already has a pending interrupt. I0125 05:13:02.587711 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. 
(44.081283ms) I0125 05:13:02.588260 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:02.603160 4678 audit.go:125] 2017-01-25T05:13:02.603086044-05:00 AUDIT: id="e4c467f5-cc21-4010-a13b-0addf928709a" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:13:02.605314 4678 audit.go:45] 2017-01-25T05:13:02.605289912-05:00 AUDIT: id="e4c467f5-cc21-4010-a13b-0addf928709a" response="200" I0125 05:13:02.605748 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (18.671921ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:02.605791 4678 audit.go:125] 2017-01-25T05:13:02.605710923-05:00 AUDIT: id="f3c933c9-26e1-41bc-96f4-bd30e31e19c8" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-2%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master&resourceVersion=0" I0125 05:13:02.606437 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 1->1 (need 1), fullyLabeledReplicas 1->1, readyReplicas 0->0, availableReplicas 0->0, sequence No: 2->2 I0125 05:13:02.606617 4678 audit.go:45] 2017-01-25T05:13:02.606600619-05:00 AUDIT: id="f3c933c9-26e1-41bc-96f4-bd30e31e19c8" response="200" I0125 05:13:02.607047 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-2%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master&resourceVersion=0: (7.993582ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:02.607583 4678 audit.go:125] 2017-01-25T05:13:02.607510919-05:00 AUDIT: id="2120bf77-d887-4161-8fd3-d6d87a5a7442" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim" I0125 05:13:02.609282 4678 audit.go:45] 2017-01-25T05:13:02.609244334-05:00 AUDIT: id="2120bf77-d887-4161-8fd3-d6d87a5a7442" response="200" I0125 05:13:02.609398 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim: (2.258232ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:02.610445 4678 audit.go:125] 2017-01-25T05:13:02.610414822-05:00 AUDIT: id="9df08bf9-0a13-49c0-936f-f50e43e1bbb7" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:02.610668 4678 audit.go:125] 2017-01-25T05:13:02.610634921-05:00 AUDIT: id="a2fb5609-2756-46e3-adac-eba430ed3006" ip="172.18.7.222" 
method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:13:02.615354 4678 audit.go:45] 2017-01-25T05:13:02.615337142-05:00 AUDIT: id="a2fb5609-2756-46e3-adac-eba430ed3006" response="200" I0125 05:13:02.616668 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (8.893083ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:02.617900 4678 factory.go:154] Replication controller "postgresql-master-2" updated. I0125 05:13:02.618266 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (50.458702ms) I0125 05:13:02.618347 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (42.186µs) I0125 05:13:02.618507 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 1->1 I0125 05:13:02.618576 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (42.183µs) I0125 05:13:02.619612 4678 audit.go:45] 2017-01-25T05:13:02.619597359-05:00 AUDIT: id="9df08bf9-0a13-49c0-936f-f50e43e1bbb7" response="200" I0125 05:13:02.619704 4678 panics.go:76] GET /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (9.577125ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:02.620612 4678 audit.go:125] 2017-01-25T05:13:02.620559139-05:00 AUDIT: id="ad603fa7-be6e-4a09-8f20-81b346915a29" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-2%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master&resourceVersion=11144&timeoutSeconds=480" I0125 05:13:02.621140 4678 audit.go:45] 2017-01-25T05:13:02.621127558-05:00 AUDIT: id="ad603fa7-be6e-4a09-8f20-81b346915a29" response="200" I0125 05:13:02.621561 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/host-path/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" (spec.Name: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000") pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:13:02.621599 4678 reconciler.go:230] VerifyControllerAttachedVolume operation started for volume "kubernetes.io/secret/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094") I0125 05:13:02.622551 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:02.642181 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:02.661284 4678 
iptables.go:298] running iptables-save [-t nat] I0125 05:13:02.681234 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 
-j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:02.681280 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:02.701847 4678 proxier.go:797] syncProxyRules took 279.832817ms I0125 05:13:02.701876 4678 proxier.go:566] OnEndpointsUpdate took 279.925783ms for 6 endpoints I0125 05:13:02.701926 4678 proxier.go:381] Received update notice: [] I0125 05:13:02.701961 4678 proxier.go:804] Syncing iptables rules I0125 05:13:02.701970 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:02.720408 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:02.729483 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/host-path/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" (spec.Name: 
"pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000") to pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:02.729665 4678 reconciler.go:306] MountVolume operation started for volume "kubernetes.io/secret/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") to pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:02.729801 4678 secret.go:179] Setting up volume default-token-0g2nw for pod daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:02.730111 4678 empty_dir.go:248] pod daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094: mounting tmpfs for volume wrapped_default-token-0g2nw I0125 05:13:02.730125 4678 mount_linux.go:112] Mounting tmpfs /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw tmpfs [] with command: "mount" I0125 05:13:02.730133 4678 mount_linux.go:115] Mounting cmd (mount) with arguments ([-t tmpfs tmpfs /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw]) I0125 05:13:02.753185 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:02.764917 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/host-path/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" (spec.Name: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000") pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:13:02.766414 4678 audit.go:125] 2017-01-25T05:13:02.766347331-05:00 AUDIT: id="8376af33-3b3d-4a6e-b67f-5b7aea3cd9cf" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:13:02.768382 4678 audit.go:45] 2017-01-25T05:13:02.768365118-05:00 AUDIT: id="8376af33-3b3d-4a6e-b67f-5b7aea3cd9cf" response="200" I0125 05:13:02.768642 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (2.645554ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:02.768962 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:13:02.769078 4678 atomic_writer.go:145] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k volume default-token-0g2nw: write required for target directory /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:02.769752 4678 atomic_writer.go:160] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k volume default-token-0g2nw: performed write of new data to ts data directory: /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw/..1981_25_01_05_13_02.270293566 I0125 05:13:02.770131 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). 
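The atomic_writer.go lines above write the secret payload into a fresh timestamped data directory before exposing it to the container. A sketch of the underlying pattern, assuming hypothetical file names: write the new files into a new directory, then atomically repoint a "..data" symlink so readers see either the old or the new payload, never a mix (the real implementation is Kubernetes' AtomicWriter):

```go
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
	"time"
)

// atomicWrite writes payload files into a new timestamped directory under
// targetDir, then swaps the "..data" symlink to point at it.
func atomicWrite(targetDir string, payload map[string][]byte) error {
	tsDir := filepath.Join(targetDir, ".."+time.Now().Format("2006_01_02_15_04_05.000000000"))
	if err := os.MkdirAll(tsDir, 0750); err != nil {
		return err
	}
	for name, data := range payload {
		if err := os.WriteFile(filepath.Join(tsDir, name), data, 0640); err != nil {
			return err
		}
	}
	// Create the new symlink under a temporary name, then rename it over
	// "..data"; rename(2) replaces the old link atomically.
	tmpLink := filepath.Join(targetDir, "..data_tmp")
	dataLink := filepath.Join(targetDir, "..data")
	os.Remove(tmpLink)
	if err := os.Symlink(filepath.Base(tsDir), tmpLink); err != nil {
		return err
	}
	return os.Rename(tmpLink, dataLink)
}

func main() {
	dir, err := os.MkdirTemp("", "secret-volume")
	if err != nil {
		log.Fatal(err)
	}
	// Hypothetical payload; the logged secret carries four keys, 4266 bytes total.
	payload := map[string][]byte{"token": []byte("example"), "ca.crt": []byte("example")}
	if err := atomicWrite(dir, payload); err != nil {
		log.Fatal(err)
	}
	fmt.Println("payload published via", filepath.Join(dir, "..data"))
}
```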
I0125 05:13:02.776917 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:02.795593 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:02.804294 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.805690 4678 audit.go:125] 2017-01-25T05:13:02.805619022-05:00 AUDIT: id="44040db5-949a-4130-b3bd-c123c5784451" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:13:02.807585 4678 audit.go:45] 2017-01-25T05:13:02.807570541-05:00 AUDIT: id="44040db5-949a-4130-b3bd-c123c5784451" response="200" I0125 05:13:02.807902 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (2.606247ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:02.808284 4678 docker_manager.go:1947] Need to restart pod infra container for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" because it is not found I0125 05:13:02.808362 4678 docker_manager.go:1992] Container {Name:postgresql-master Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 Command:[] Args:[run-postgresql-master] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_MASTER_USER Value:master ValueFrom:} {Name:POSTGRESQL_MASTER_PASSWORD Value:qcoktIqkwDX8 ValueFrom:} {Name:POSTGRESQL_USER Value:user ValueFrom:} {Name:POSTGRESQL_PASSWORD Value:IbyV1wgYrrMd ValueFrom:} {Name:POSTGRESQL_DATABASE Value:userdb ValueFrom:} {Name:POSTGRESQL_ADMIN_PASSWORD Value:newpass ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:postgresql-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc428f3ea80 ReadinessProbe:0xc428f3eab0 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc428f3eae0 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it. 
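The container spec above defines liveness and readiness probes against port 5432, and a few records further down the kubelet logs "TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s" and a liveness failure "dial tcp 172.17.0.7:5432: i/o timeout". A minimal sketch of such a TCP check with the same 1s timeout; the address is illustrative, not taken from a live cluster:

```go
package main

import (
	"fmt"
	"net"
	"time"
)

// tcpProbe reports whether a TCP connection to addr can be established
// within timeout, which is essentially what a kubelet TCPSocket probe checks.
func tcpProbe(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return err
	}
	return conn.Close()
}

func main() {
	// Illustrative address; the log probes the pod IP on the PostgreSQL port,
	// e.g. 172.17.0.8:5432, with a 1 second timeout.
	addr := "127.0.0.1:5432"
	if err := tcpProbe(addr, 1*time.Second); err != nil {
		fmt.Println("probe failed:", err) // e.g. "dial tcp ...: i/o timeout"
		return
	}
	fmt.Println("probe succeeded for", addr)
}
```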
I0125 05:13:02.808396 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:true InfraChanged:false InfraContainerId: InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[0:Container {Name:postgresql-master Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 Command:[] Args:[run-postgresql-master] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_MASTER_USER Value:master ValueFrom:} {Name:POSTGRESQL_MASTER_PASSWORD Value:qcoktIqkwDX8 ValueFrom:} {Name:POSTGRESQL_USER Value:user ValueFrom:} {Name:POSTGRESQL_PASSWORD Value:IbyV1wgYrrMd ValueFrom:} {Name:POSTGRESQL_DATABASE Value:userdb ValueFrom:} {Name:POSTGRESQL_ADMIN_PASSWORD Value:newpass ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:postgresql-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc428f3ea80 ReadinessProbe:0xc428f3eab0 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc428f3eae0 Stdin:false StdinOnce:false TTY:false} is dead, but RestartPolicy says that we should restart it.] ContainersToKeep:map[]} I0125 05:13:02.808434 4678 docker_manager.go:2095] Killing Infra Container for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)", will start new one I0125 05:13:02.808473 4678 docker_manager.go:2153] Creating pod infra container for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:02.812008 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k/POD podIP: "" creating hosts mount: false I0125 05:13:02.822238 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:02.857082 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:02.894644 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:02.908943 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k/POD: setting entrypoint "[]" and command "[]" I0125 05:13:02.909865 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:13:02.910244 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:02.914281 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q 
-d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:02.923697 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:02.946865 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name 
KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:02.946908 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:02.967244 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:02.967310 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:02.970833 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:02 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc434671360 0 [] true false map[] 0xc425d14f00 } I0125 05:13:02.970908 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 
05:13:02.971021 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:02.971035 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:02.971774 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:02 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc4346715c0 0 [] true false map[] 0xc425d151d0 } I0125 05:13:02.971824 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:02.976137 4678 proxier.go:797] syncProxyRules took 274.1709ms I0125 05:13:02.976160 4678 proxier.go:431] OnServiceUpdate took 274.221551ms for 4 services I0125 05:13:02.976198 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:13:02.976353 4678 proxier.go:804] Syncing iptables rules I0125 05:13:02.976364 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:02.990289 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:02.990377 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:13:02.990394 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:13:02.990436 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:02.990444 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:02.990455 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:02.990462 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:02.990470 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:03.003715 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:03.022894 4678 audit.go:125] 2017-01-25T05:13:03.022844336-05:00 AUDIT: id="f53baffe-b1d1-4262-b90a-5b78bd2ab163" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj" I0125 05:13:03.031954 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:03.049116 4678 audit.go:45] 2017-01-25T05:13:03.049082771-05:00 AUDIT: id="f53baffe-b1d1-4262-b90a-5b78bd2ab163" response="404" I0125 05:13:03.049302 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-1-6jfgj: (29.121102ms) 404 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:03.061214 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:03.094969 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service 
portals -j KUBE-SERVICES] I0125 05:13:03.107842 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:13:03.107889 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded W0125 05:13:03.109832 4678 prober.go:98] No ref for container "docker://0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f" (postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master) I0125 05:13:03.109850 4678 prober.go:106] Liveness probe for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" failed (failure): dial tcp 172.17.0.7:5432: i/o timeout I0125 05:13:03.117369 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:03.137176 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:03.159081 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:03.182442 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:03.202594 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 
172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j 
KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:03.202630 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:03.227159 4678 proxier.go:797] syncProxyRules took 250.801051ms I0125 05:13:03.227187 4678 proxier.go:566] OnEndpointsUpdate took 250.905857ms for 6 endpoints I0125 05:13:03.227256 4678 proxier.go:381] Received update notice: [] I0125 05:13:03.227292 4678 proxier.go:804] Syncing iptables rules I0125 05:13:03.227303 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:03.245870 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:03.274854 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:03.296431 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:03.317809 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:03.338314 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:03.353003 4678 audit.go:125] 2017-01-25T05:13:03.352952276-05:00 AUDIT: id="054a98f9-5d74-44e7-87cc-baaec441adf5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:03.353711 4678 audit.go:45] 2017-01-25T05:13:03.353693779-05:00 AUDIT: id="054a98f9-5d74-44e7-87cc-baaec441adf5" response="200" I0125 05:13:03.354182 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.542509ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:03.362907 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:03.384504 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:03.402425 4678 generic.go:145] GenericPLEG: daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650: non-existent -> unknown I0125 05:13:03.411212 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:03.435662 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring 
SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment 
default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:03.435706 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:03.487323 4678 worker.go:162] Probe target container not found: postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094) - postgresql-master I0125 05:13:03.490890 4678 proxier.go:797] syncProxyRules took 263.590861ms I0125 05:13:03.490919 4678 proxier.go:431] OnServiceUpdate took 263.649011ms for 4 services I0125 05:13:03.572238 4678 reflector.go:273] pkg/controller/endpoint/endpoints_controller.go:160: forcing resync I0125 05:13:03.572367 4678 endpoints_controller.go:334] Finished syncing service "default/kubernetes" endpoints. 
(1.371µs) I0125 05:13:03.576854 4678 audit.go:125] 2017-01-25T05:13:03.576806152-05:00 AUDIT: id="4e6fe64e-44ed-45bd-bfe2-378670f2d05a" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:13:03.577967 4678 audit.go:45] 2017-01-25T05:13:03.577952529-05:00 AUDIT: id="4e6fe64e-44ed-45bd-bfe2-378670f2d05a" response="200" I0125 05:13:03.578055 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (4.415021ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:03.578387 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:13:03.579307 4678 audit.go:125] 2017-01-25T05:13:03.57927452-05:00 AUDIT: id="dff7f5ca-ffd4-4972-b1e1-85cab3f5b80b" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/docker-registry" I0125 05:13:03.579694 4678 audit.go:125] 2017-01-25T05:13:03.57966189-05:00 AUDIT: id="5bf69225-eff3-4373-a71d-d21b27877ab2" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:13:03.580031 4678 audit.go:125] 2017-01-25T05:13:03.580004087-05:00 AUDIT: id="6ae74954-6428-434e-871d-6a3dac187f6a" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/router" I0125 05:13:03.580366 4678 audit.go:125] 2017-01-25T05:13:03.580333506-05:00 AUDIT: id="fa960643-e3c8-4d0a-a331-e45ab6dc9ed8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:03.580571 4678 audit.go:45] 2017-01-25T05:13:03.580557147-05:00 AUDIT: id="dff7f5ca-ffd4-4972-b1e1-85cab3f5b80b" response="200" I0125 05:13:03.580633 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/docker-registry: (6.10668ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:03.580915 4678 endpoints_controller.go:334] Finished syncing service "default/docker-registry" endpoints. 
(7.799802ms) I0125 05:13:03.581516 4678 audit.go:125] 2017-01-25T05:13:03.581483259-05:00 AUDIT: id="473da25c-1872-4941-8f9d-e89a906e0696" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:13:03.581959 4678 audit.go:45] 2017-01-25T05:13:03.58194611-05:00 AUDIT: id="fa960643-e3c8-4d0a-a331-e45ab6dc9ed8" response="200" I0125 05:13:03.582013 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (7.637326ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:03.582110 4678 audit.go:45] 2017-01-25T05:13:03.582098823-05:00 AUDIT: id="6ae74954-6428-434e-871d-6a3dac187f6a" response="200" I0125 05:13:03.582163 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/router: (8.565632ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:03.582278 4678 audit.go:45] 2017-01-25T05:13:03.582266538-05:00 AUDIT: id="5bf69225-eff3-4373-a71d-d21b27877ab2" response="200" I0125 05:13:03.582328 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (8.277619ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:03.582563 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (9.573455ms) I0125 05:13:03.582637 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:13:03.582914 4678 endpoints_controller.go:334] Finished syncing service "default/router" endpoints. (10.526017ms) I0125 05:13:03.583716 4678 audit.go:45] 2017-01-25T05:13:03.583703018-05:00 AUDIT: id="473da25c-1872-4941-8f9d-e89a906e0696" response="200" I0125 05:13:03.583776 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (4.918081ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:03.584406 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:13:03.584457 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(11.610584ms) I0125 05:13:03.584556 4678 proxier.go:804] Syncing iptables rules I0125 05:13:03.584567 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:03.602338 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:03.602423 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:03.602440 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:13:03.602455 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:13:03.602478 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:03.602496 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:03.602513 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:03.602521 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:03.604395 4678 audit.go:125] 2017-01-25T05:13:03.60432493-05:00 AUDIT: id="f5dbcb85-e937-4060-adf1-32698f9834c2" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:03.605383 4678 audit.go:45] 2017-01-25T05:13:03.605367736-05:00 AUDIT: id="f5dbcb85-e937-4060-adf1-32698f9834c2" response="200" I0125 05:13:03.605483 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (22.329615ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:03.605978 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. 
(33.242822ms) I0125 05:13:03.613000 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:03.634062 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:03.656619 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:03.671074 4678 reflector.go:273] pkg/controller/podautoscaler/horizontal.go:133: forcing resync I0125 05:13:03.684651 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:03.686935 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:03.716373 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:03.739388 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:03.755027 4678 kubelet_volumes.go:104] Orphaned pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" found, but volumes are not cleaned up I0125 05:13:03.755906 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650.scope" I0125 05:13:03.756165 4678 reflector.go:273] pkg/controller/disruption/disruption.go:283: forcing resync I0125 05:13:03.756320 4678 reflector.go:273] pkg/controller/disruption/disruption.go:284: forcing resync I0125 05:13:03.756336 4678 reflector.go:273] pkg/controller/disruption/disruption.go:281: forcing resync I0125 05:13:03.765157 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42cdbd4a0 Mounts:[] Config:0xc426ecb560 NetworkSettings:0xc42cc10000} E0125 05:13:03.765804 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" container "POD": symlink /var/log/containers/postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8_POD-e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650.log: no such file or directory I0125 05:13:03.765851 4678 docker_manager.go:1833] DNS ResolvConfPath exists: /var/lib/docker/containers/e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650/resolv.conf. 
Will attempt to add ndots option: options ndots:5 I0125 05:13:03.765916 4678 docker_manager.go:2167] Calling network plugin kubernetes.io/no-op to setup pod for postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:13:03.768269 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:03.786014 4678 generic.go:342] PLEG: Write status for postgresql-master-2-46j9k/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-2-46j9k", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.2", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc426dfe8c0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:13:03.786163 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8(b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:03.786450 4678 status_manager.go:402] Status for pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" is up-to-date; skipping I0125 05:13:03.811748 4678 reflector.go:273] pkg/controller/disruption/disruption.go:285: forcing resync I0125 05:13:03.818836 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:03.850974 4678 reflector.go:273] pkg/controller/petset/pet_set.go:148: forcing resync I0125 05:13:03.853334 4678 hairpin.go:110] Enabling hairpin on interface veth8aa7398 I0125 05:13:03.853660 4678 docker_manager.go:2208] Determined pod ip after infra change: "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)": "172.17.0.2" I0125 05:13:03.853690 4678 docker_manager.go:2293] Creating container &{Name:postgresql-master Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 Command:[] Args:[run-postgresql-master] WorkingDir: Ports:[{Name: HostPort:0 ContainerPort:5432 Protocol:TCP HostIP:}] Env:[{Name:POSTGRESQL_MASTER_USER Value:master ValueFrom:} {Name:POSTGRESQL_MASTER_PASSWORD Value:qcoktIqkwDX8 ValueFrom:} {Name:POSTGRESQL_USER Value:user ValueFrom:} {Name:POSTGRESQL_PASSWORD Value:IbyV1wgYrrMd ValueFrom:} {Name:POSTGRESQL_DATABASE Value:userdb ValueFrom:} {Name:POSTGRESQL_ADMIN_PASSWORD Value:newpass ValueFrom:}] Resources:{Limits:map[] Requests:map[]} VolumeMounts:[{Name:postgresql-data ReadOnly:false MountPath:/var/lib/pgsql/data SubPath:} {Name:default-token-0g2nw ReadOnly:true MountPath:/var/run/secrets/kubernetes.io/serviceaccount SubPath:}] LivenessProbe:0xc428f3ea80 ReadinessProbe:0xc428f3eab0 Lifecycle: TerminationMessagePath:/dev/termination-log ImagePullPolicy:IfNotPresent SecurityContext:0xc428f3eae0 Stdin:false StdinOnce:false TTY:false} in pod postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:13:03.854637 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] 
:KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 
172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:03.854667 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:03.877345 4678 kubelet_pods.go:107] container: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k/postgresql-master podIP: "172.17.0.2" creating hosts mount: true I0125 05:13:03.878613 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-2-46j9k", UID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11140", FieldPath:"spec.containers{postgresql-master}"}): type: 'Normal' reason: 'Pulled' Container image "centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389" already present on machine I0125 05:13:03.879396 4678 audit.go:125] 2017-01-25T05:13:03.879358499-05:00 AUDIT: id="284f0dc5-e347-4a51-9ea8-48cdfcc72112" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:13:03.882843 4678 audit.go:45] 2017-01-25T05:13:03.882824626-05:00 AUDIT: id="284f0dc5-e347-4a51-9ea8-48cdfcc72112" response="201" I0125 05:13:03.882917 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.936348ms) 201 
[[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:03.895281 4678 proxier.go:797] syncProxyRules took 310.71905ms I0125 05:13:03.895309 4678 proxier.go:566] OnEndpointsUpdate took 310.816296ms for 6 endpoints I0125 05:13:03.895355 4678 proxier.go:381] Received update notice: [] I0125 05:13:03.895400 4678 proxier.go:804] Syncing iptables rules I0125 05:13:03.895410 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:03.935254 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:03.968209 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:03.988440 4678 reflector.go:273] pkg/controller/informers/factory.go:89: forcing resync I0125 05:13:04.000281 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:04.015699 4678 docker_manager.go:784] Container extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k/postgresql-master: setting entrypoint "[]" and command "[run-postgresql-master]" I0125 05:13:04.029240 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:04.062698 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:04.079122 4678 audit.go:125] 2017-01-25T05:13:04.079076188-05:00 AUDIT: id="08711cbd-8ceb-43a4-9316-78bcb9f5f649" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-2" I0125 05:13:04.081405 4678 audit.go:45] 2017-01-25T05:13:04.081386526-05:00 AUDIT: id="08711cbd-8ceb-43a4-9316-78bcb9f5f649" response="200" I0125 05:13:04.081736 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-2: (19.151497ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:04.091743 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:04.110895 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:04.131893 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:13:04.131984 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:13:04.132029 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:04.132099 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:04.132121 4678 pv_controller.go:356] 
synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:13:04.132135 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:04.132144 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:04.132337 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:04.132348 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:13:04.132355 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:13:04.132360 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:04.132411 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:04.132422 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:13:04.132484 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:13:04.132495 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:04.132516 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:04.132536 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:04.132619 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:13:04.132647 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:13:04.132716 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:13:04.132762 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:04.132778 4678 pv_controller.go:404] synchronizing 
PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:13:04.132799 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:04.132827 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:13:04.132839 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:13:04.132846 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:13:04.138086 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:04.164326 4678 manager.go:898] Added container: "/system.slice/docker-e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650.scope" (aliases: [k8s_POD.73b4fecf_postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8_daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094_6093cf99 e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650], namespace: "docker") I0125 05:13:04.164647 4678 handler.go:325] Added event &{/system.slice/docker-e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650.scope 2017-01-25 05:13:03.54798488 -0500 EST containerCreation {}} I0125 05:13:04.164726 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-daedc0da\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:13:04.164736 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:13:04.164759 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. 
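The KUBE-SVC-* and KUBE-SEP-* suffixes that recur in the iptables-restore payloads above and below are not hand-written identifiers: kube-proxy derives each chain name deterministically from the service or endpoint identity, so that repeated rule syncs keep referring to the same chains. A minimal Go sketch of a naming scheme of that shape, assuming the suffix is the first 16 characters of a base32-encoded SHA-256 of the service-port string plus protocol (the exact inputs vary between releases, so this is illustrative rather than the proxier's actual code):

package main

import (
	"crypto/sha256"
	"encoding/base32"
	"fmt"
)

// chainSuffix derives a short, stable identifier from a service-port name
// and protocol: hash the string, base32-encode the digest, keep a prefix.
func chainSuffix(servicePortName, protocol string) string {
	sum := sha256.Sum256([]byte(servicePortName + protocol))
	return base32.StdEncoding.EncodeToString(sum[:])[:16]
}

func main() {
	// Hypothetical inputs modeled on services that appear in this log.
	for _, svc := range []string{"default/kubernetes:https", "default/router:80-tcp"} {
		fmt.Printf("KUBE-SVC-%s  (from %s)\n", chainSuffix(svc, "TCP"), svc)
	}
}

Because the name is a pure function of the service identity, each resync can rebuild its rules with iptables-restore --noflush, as logged here, and still line up with chains created by earlier syncs.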
I0125 05:13:04.164774 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:13:04.164804 4678 container.go:407] Start housekeeping for container "/system.slice/docker-e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650.scope" I0125 05:13:04.188532 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m 
comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:04.188577 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:04.227571 4678 proxier.go:797] syncProxyRules took 332.162605ms I0125 05:13:04.227601 4678 proxier.go:431] OnServiceUpdate took 332.231407ms for 4 services I0125 05:13:04.331080 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/secret/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" 
(spec.Name: "default-token-0g2nw") from pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:04.331167 4678 util.go:340] Tearing down volume default-token-0g2nw for pod b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:04.331217 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" (OuterVolumeSpecName: "postgresql-data") pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000". PluginName "kubernetes.io/host-path", VolumeGidValue "" I0125 05:13:04.331231 4678 mount_linux.go:147] Unmounting /mnt/openshift-xfs-vol-dir/pods/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:04.331156 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/host-path/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" (spec.Name: "postgresql-data") from pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:04.350719 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (OuterVolumeSpecName: "default-token-0g2nw") pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "default-token-0g2nw". PluginName "kubernetes.io/secret", VolumeGidValue "" I0125 05:13:04.791308 4678 generic.go:145] GenericPLEG: daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650: unknown -> running I0125 05:13:04.924287 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42de062c0 Mounts:[] Config:0xc42f7dfe60 NetworkSettings:0xc42b390900} I0125 05:13:04.926607 4678 generic.go:342] PLEG: Write status for postgresql-master-2-46j9k/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-2-46j9k", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.2", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc428ed5ce0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:13:04.926661 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650"} I0125 05:13:04.927231 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-2-46j9k", UID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11140", FieldPath:"spec.containers{postgresql-master}"}): type: 'Normal' reason: 'Created' Created container with docker id ebd85b26ebba; Security:[seccomp=unconfined] I0125 05:13:04.927866 4678 audit.go:125] 2017-01-25T05:13:04.927821632-05:00 AUDIT: id="74557f37-bf30-4a12-bf3f-6ffcac813c12" ip="172.18.7.222" method="POST" 
user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:13:04.930698 4678 audit.go:45] 2017-01-25T05:13:04.93068013-05:00 AUDIT: id="74557f37-bf30-4a12-bf3f-6ffcac813c12" response="201" I0125 05:13:04.930763 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.218833ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:05.053712 4678 audit.go:125] 2017-01-25T05:13:05.053652669-05:00 AUDIT: id="f6bec971-9c71-48cb-945e-9d6abdd81bd8" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-2" I0125 05:13:05.055865 4678 audit.go:45] 2017-01-25T05:13:05.055841864-05:00 AUDIT: id="f6bec971-9c71-48cb-945e-9d6abdd81bd8" response="200" I0125 05:13:05.056155 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-2: (4.178544ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:05.084338 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-2-46j9k", UID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11140", FieldPath:"spec.containers{postgresql-master}"}): type: 'Normal' reason: 'Started' Started container with docker id ebd85b26ebba I0125 05:13:05.085012 4678 audit.go:125] 2017-01-25T05:13:05.084969305-05:00 AUDIT: id="f9cbd796-74c4-418b-8308-a0bc98d7e67a" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:13:05.087799 4678 audit.go:45] 2017-01-25T05:13:05.087784778-05:00 AUDIT: id="f9cbd796-74c4-418b-8308-a0bc98d7e67a" response="201" I0125 05:13:05.087847 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.166775ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:05.089934 4678 factory.go:111] Using factory "docker" for container "/system.slice/docker-ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d.scope" E0125 05:13:05.091448 4678 docker_manager.go:1742] Failed to create symbolic link to the log file of pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql-master": symlink /var/log/containers/postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-master-ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d.log: no such file or directory I0125 05:13:05.241162 4678 audit.go:125] 2017-01-25T05:13:05.241118921-05:00 AUDIT: id="6df93682-d912-4531-a12f-57e52691f149" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:13:05.242399 4678 audit.go:45] 2017-01-25T05:13:05.242383152-05:00 AUDIT: id="6df93682-d912-4531-a12f-57e52691f149" 
response="200" I0125 05:13:05.242486 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.657259ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:05.288475 4678 manager.go:898] Added container: "/system.slice/docker-ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d.scope" (aliases: [k8s_postgresql-master.dfff9f08_postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8_daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094_566678ec ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d], namespace: "docker") I0125 05:13:05.288635 4678 handler.go:325] Added event &{/system.slice/docker-ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d.scope 2017-01-25 05:13:05.019987535 -0500 EST containerCreation {}} I0125 05:13:05.288687 4678 container.go:407] Start housekeeping for container "/system.slice/docker-ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d.scope" I0125 05:13:05.684651 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:05.691461 4678 kubelet_volumes.go:113] Orphaned pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" found, removing I0125 05:13:05.931990 4678 generic.go:145] GenericPLEG: daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d: non-existent -> running I0125 05:13:05.938277 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc4281c6c60 Mounts:[{Name: Source:/tmp/openshift-extended-tests/persistent-volumes816894978/0000099920249 Destination:/var/lib/pgsql/data Driver: Mode: RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/containers/postgresql-master/566678ec Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42e43efc0 NetworkSettings:0xc42eb67400} I0125 05:13:05.941994 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42d2e0b00 Mounts:[] Config:0xc431149e60 NetworkSettings:0xc429341f00} I0125 05:13:05.943687 4678 generic.go:342] PLEG: Write status for postgresql-master-2-46j9k/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-2-46j9k", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.2", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42a189960), (*container.ContainerStatus)(0xc428ec1500)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:13:05.943754 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:05.943756 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerStarted", Data:"ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d"} I0125 05:13:05.943937 4678 
volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:05.944594 4678 audit.go:125] 2017-01-25T05:13:05.944563671-05:00 AUDIT: id="1a951980-f5c7-470e-a435-2c09e392b82d" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k" I0125 05:13:05.945885 4678 audit.go:45] 2017-01-25T05:13:05.945874536-05:00 AUDIT: id="1a951980-f5c7-470e-a435-2c09e392b82d" response="200" I0125 05:13:05.945975 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k: (1.6245ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:05.946860 4678 audit.go:125] 2017-01-25T05:13:05.946833765-05:00 AUDIT: id="89608898-40f2-49eb-90ea-ced3912eae91" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k/status" I0125 05:13:05.948595 4678 audit.go:45] 2017-01-25T05:13:05.94857904-05:00 AUDIT: id="89608898-40f2-49eb-90ea-ced3912eae91" response="200" I0125 05:13:05.948671 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k/status: (2.011763ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:05.949368 4678 replication_controller.go:378] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11144 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11155 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master] 
Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:13:05.949576 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (40.182µs) I0125 05:13:05.949614 4678 replica_set.go:320] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11144 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-2] Annotations:map[openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11155 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
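The objectMeta diff in the preceding records is less of a change than it looks: the label and annotation sets are identical on both sides, printed in a different key order only because Go map iteration order is unspecified, and within the metadata shown only the ResourceVersion differs (11144 to 11155), consistent with the pod status PUT logged just before. A small stand-alone Go illustration of that map-ordering property, using hypothetical label maps rather than the controller's data structures:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	a := map[string]string{"app": "pg-replica-example", "deployment": "postgresql-master-2", "name": "postgresql-master"}
	b := map[string]string{"name": "postgresql-master", "deployment": "postgresql-master-2", "app": "pg-replica-example"}

	// Iterating a map yields keys in no particular order, and the order can
	// change from run to run, so two dumps of equal maps may list keys
	// differently.
	for k, v := range a {
		fmt.Println(k, "=", v)
	}

	// The maps nevertheless compare equal.
	fmt.Println("equal:", reflect.DeepEqual(a, b))
}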
I0125 05:13:05.949712 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-46j9k, ReplicaSet controller will avoid syncing I0125 05:13:05.949623 4678 status_manager.go:425] Status for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql-master]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.2 StartTime:0xc429674340 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running:0xc434f94da0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d}]} version:2 podName:postgresql-master-2-46j9k podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:13:05.949760 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-46j9k, job controller will avoid syncing I0125 05:13:05.949779 4678 config.go:281] Setting pods for source api I0125 05:13:05.949786 4678 daemoncontroller.go:332] Pod postgresql-master-2-46j9k updated. I0125 05:13:05.949821 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-46j9k, daemon set controller will avoid syncing I0125 05:13:05.949844 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-46j9k" I0125 05:13:05.949861 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-46j9k, PodDisruptionBudget controller will avoid syncing. 
I0125 05:13:05.949867 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-46j9k" I0125 05:13:05.950163 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-46j9k, StatefulSet controller will avoid syncing I0125 05:13:05.951055 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:05.951564 4678 secret.go:179] Setting up volume default-token-0g2nw for pod daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:05.952247 4678 audit.go:125] 2017-01-25T05:13:05.952210421-05:00 AUDIT: id="8be5e220-012c-45a6-8e2e-5e8cf5198472" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim" I0125 05:13:05.952915 4678 audit.go:125] 2017-01-25T05:13:05.952891961-05:00 AUDIT: id="511bbe19-df23-4034-885e-8fe5ab4600d6" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:13:05.954459 4678 audit.go:45] 2017-01-25T05:13:05.954446271-05:00 AUDIT: id="511bbe19-df23-4034-885e-8fe5ab4600d6" response="200" I0125 05:13:05.954643 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (1.911875ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:05.954836 4678 audit.go:125] 2017-01-25T05:13:05.954803046-05:00 AUDIT: id="e2f6fc6d-bd0b-4aaa-af0c-1f95451b282e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:05.954937 4678 audit.go:45] 2017-01-25T05:13:05.954922904-05:00 AUDIT: id="8be5e220-012c-45a6-8e2e-5e8cf5198472" response="200" I0125 05:13:05.955012 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim: (3.048466ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:05.955490 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:13:05.955743 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:05.955782 4678 audit.go:45] 2017-01-25T05:13:05.95576972-05:00 AUDIT: id="e2f6fc6d-bd0b-4aaa-af0c-1f95451b282e" response="200" I0125 05:13:05.955841 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (5.2994ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 
172.18.7.222:50846] I0125 05:13:05.955901 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:05.956142 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 1 I0125 05:13:05.956345 4678 audit.go:125] 2017-01-25T05:13:05.956316578-05:00 AUDIT: id="25d54bcc-68c9-4c52-ac31-0659172e3750" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:05.957327 4678 audit.go:45] 2017-01-25T05:13:05.957313431-05:00 AUDIT: id="25d54bcc-68c9-4c52-ac31-0659172e3750" response="200" I0125 05:13:05.957413 4678 panics.go:76] GET /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (1.342628ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:05.958411 4678 audit.go:125] 2017-01-25T05:13:05.958377636-05:00 AUDIT: id="1e1f70bd-e3e1-41ad-920a-58a5eca38fec" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:05.959778 4678 audit.go:45] 2017-01-25T05:13:05.959758974-05:00 AUDIT: id="1e1f70bd-e3e1-41ad-920a-58a5eca38fec" response="200" I0125 05:13:05.959832 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (3.283002ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:05.960043 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. 
(10.115776ms) I0125 05:13:05.960273 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:13:05.960398 4678 proxier.go:804] Syncing iptables rules I0125 05:13:05.960416 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:05.969245 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:05.969337 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:05.969349 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:05.969357 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:05.969365 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:13:05.969419 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:05.969428 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:05.969435 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:13:05.979515 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:05.998481 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:06.031581 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:06.062282 4678 audit.go:125] 2017-01-25T05:13:06.062211812-05:00 AUDIT: id="2e60c93a-92e5-47f0-916a-0b4fc6f87812" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-2" I0125 05:13:06.064484 4678 audit.go:45] 2017-01-25T05:13:06.064466648-05:00 AUDIT: id="2e60c93a-92e5-47f0-916a-0b4fc6f87812" response="200" I0125 05:13:06.064943 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-2: (5.573715ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:06.072197 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:06.092002 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:06.110936 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:06.129498 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:06.148261 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:06.168052 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - 
[0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment 
--comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:06.168085 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:06.199425 4678 proxier.go:797] syncProxyRules took 239.019733ms I0125 05:13:06.199460 4678 proxier.go:566] OnEndpointsUpdate took 239.12493ms for 6 endpoints I0125 05:13:06.199525 4678 proxier.go:381] Received update notice: [] I0125 05:13:06.199574 4678 proxier.go:804] Syncing iptables rules I0125 05:13:06.199588 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:06.232952 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:06.244323 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:06.245447 4678 audit.go:125] 2017-01-25T05:13:06.245401577-05:00 AUDIT: id="0cd48676-98c3-4bee-8f85-a171453a8a95" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:13:06.247513 4678 audit.go:45] 2017-01-25T05:13:06.247496986-05:00 AUDIT: id="0cd48676-98c3-4bee-8f85-a171453a8a95" response="200" I0125 05:13:06.247831 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (2.74752ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:06.248283 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:06.248360 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:06.248381 4678 docker_manager.go:1999] pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql-master" exists as ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d I0125 05:13:06.248493 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650:-1 ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d:0]} I0125 05:13:06.254972 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:06.274541 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:06.293749 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:06.313596 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:06.332737 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:06.351950 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:06.371452 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:06.391727 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m 
comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name 
KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:06.391760 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:06.412493 4678 proxier.go:797] syncProxyRules took 212.913208ms I0125 05:13:06.412529 4678 proxier.go:431] OnServiceUpdate took 212.981141ms for 4 services I0125 05:13:06.774326 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:13:06.774349 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:13:06.873068 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:13:06.949051 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:06.949232 4678 status_manager.go:312] Ignoring same status for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:13:02 -0500 EST Reason: Message:} {Type:Ready Status:False LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:13:02 -0500 EST Reason:ContainersNotReady Message:containers with unready status: [postgresql-master]} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:13:02 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.2 StartTime:2017-01-25 05:13:02 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running:0xc433edbe40 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d}]} I0125 05:13:06.949345 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:06.971296 4678 audit.go:125] 2017-01-25T05:13:06.971256467-05:00 AUDIT: id="b5e1f30a-3bf0-42b0-888f-997ad39a1919" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim" I0125 05:13:06.972546 4678 audit.go:45] 2017-01-25T05:13:06.972531635-05:00 AUDIT: id="b5e1f30a-3bf0-42b0-888f-997ad39a1919" response="200" I0125 05:13:06.972649 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim: (1.637097ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:06.973382 4678 audit.go:125] 2017-01-25T05:13:06.973350894-05:00 AUDIT: id="8b7bad7e-2677-4f16-a9d8-0a193366bd01" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:06.973498 4678 secret.go:179] Setting up volume default-token-0g2nw for pod daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:06.974087 4678 audit.go:125] 2017-01-25T05:13:06.974054444-05:00 AUDIT: id="88e4d9c2-027d-41f9-a399-7a08509094e2" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:13:06.974545 4678 audit.go:45] 2017-01-25T05:13:06.974532015-05:00 AUDIT: id="8b7bad7e-2677-4f16-a9d8-0a193366bd01" response="200" I0125 05:13:06.974635 4678 panics.go:76] GET /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (1.489047ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:06.975104 4678 audit.go:45] 2017-01-25T05:13:06.975094784-05:00 AUDIT: id="88e4d9c2-027d-41f9-a399-7a08509094e2" response="200" I0125 05:13:06.975273 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (1.437956ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:06.975372 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:13:06.975565 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:06.975672 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:07.068327 4678 audit.go:125] 2017-01-25T05:13:07.068280811-05:00 AUDIT: id="edafc523-bd99-4a3b-9437-eeb6e803d9f2" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:07.069763 4678 audit.go:45] 2017-01-25T05:13:07.069752245-05:00 AUDIT: id="edafc523-bd99-4a3b-9437-eeb6e803d9f2" response="200" I0125 05:13:07.069838 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.290469ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:07.072634 4678 proxier.go:804] Syncing iptables rules I0125 05:13:07.072648 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:07.092065 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:07.110784 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:07.129303 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:07.147949 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:07.166784 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:07.185931 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:07.204778 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:07.223586 4678 iptables.go:298] running 
iptables-save [-t nat] I0125 05:13:07.248607 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 
172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:07.248645 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:07.259427 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:07.260818 4678 audit.go:125] 2017-01-25T05:13:07.26076366-05:00 AUDIT: id="688a79df-b28c-4ade-94f4-36baa02a688c" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:13:07.263352 4678 audit.go:45] 2017-01-25T05:13:07.263332151-05:00 AUDIT: id="688a79df-b28c-4ade-94f4-36baa02a688c" 
response="200" I0125 05:13:07.263699 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (3.267764ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:07.264185 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:07.264280 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:07.264300 4678 docker_manager.go:1999] pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql-master" exists as ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d I0125 05:13:07.264457 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650:-1 ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d:0]} I0125 05:13:07.280117 4678 proxier.go:797] syncProxyRules took 207.476339ms I0125 05:13:07.280154 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-CONTAINER -t nat] I0125 05:13:07.299516 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-CONTAINER] I0125 05:13:07.319056 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-HOST -t nat] I0125 05:13:07.337469 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-HOST] I0125 05:13:07.368910 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-CONTAINER -t nat] I0125 05:13:07.406948 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-CONTAINER] I0125 05:13:07.426096 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-HOST -t nat] I0125 05:13:07.444800 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-HOST] I0125 05:13:07.463765 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-NON-LOCAL -t filter] I0125 05:13:07.482347 4678 iptables.go:362] running iptables -C [INPUT -t filter -m comment --comment Ensure that non-local NodePort traffic can flow -j KUBE-NODEPORT-NON-LOCAL] I0125 05:13:07.570138 4678 audit.go:125] 2017-01-25T05:13:07.570096239-05:00 AUDIT: id="33e6bad0-7b4d-4583-9a92-4425a993267b" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:13:07.570604 4678 audit.go:45] 2017-01-25T05:13:07.570590756-05:00 AUDIT: id="33e6bad0-7b4d-4583-9a92-4425a993267b" response="200" I0125 05:13:07.570935 4678 panics.go:76] GET 
/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.071535ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:07.638771 4678 audit.go:125] 2017-01-25T05:13:07.638734768-05:00 AUDIT: id="5998da8c-79bf-4612-96d8-8dfc5eaa3328" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:13:07.641016 4678 audit.go:45] 2017-01-25T05:13:07.64099734-05:00 AUDIT: id="5998da8c-79bf-4612-96d8-8dfc5eaa3328" response="200" I0125 05:13:07.641357 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.835662ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:07.642369 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:13:07.684657 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:08.068269 4678 audit.go:125] 2017-01-25T05:13:08.068218794-05:00 AUDIT: id="3a0568fb-1b63-43e6-aaee-cdaa09e62ae7" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:08.069720 4678 audit.go:45] 2017-01-25T05:13:08.069706737-05:00 AUDIT: id="3a0568fb-1b63-43e6-aaee-cdaa09e62ae7" response="200" I0125 05:13:08.069809 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.248896ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:08.355759 4678 audit.go:125] 2017-01-25T05:13:08.355716814-05:00 AUDIT: id="d087b1f9-47eb-4511-aa32-a554988d5151" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:08.356182 4678 audit.go:45] 2017-01-25T05:13:08.356173318-05:00 AUDIT: id="d087b1f9-47eb-4511-aa32-a554988d5151" response="200" I0125 05:13:08.356498 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (993.852µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:08.356828 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
I0125 05:13:09.068299 4678 audit.go:125] 2017-01-25T05:13:09.068256523-05:00 AUDIT: id="57525ccc-6677-4ae0-9018-e62d1213f2ec" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:09.069649 4678 audit.go:45] 2017-01-25T05:13:09.069636986-05:00 AUDIT: id="57525ccc-6677-4ae0-9018-e62d1213f2ec" response="200" I0125 05:13:09.069737 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.123565ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:09.211529 4678 audit.go:125] 2017-01-25T05:13:09.211490943-05:00 AUDIT: id="7d3d1c79-b83d-4420-8231-4bd7b3427913" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:13:09.212382 4678 audit.go:45] 2017-01-25T05:13:09.212370254-05:00 AUDIT: id="7d3d1c79-b83d-4420-8231-4bd7b3427913" response="200" I0125 05:13:09.212472 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.885878ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:09.212748 4678 controller.go:106] Found 0 cronjobs I0125 05:13:09.214582 4678 audit.go:125] 2017-01-25T05:13:09.214559451-05:00 AUDIT: id="6304cb08-3848-413c-9be9-08bf523fc4ee" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:13:09.215382 4678 audit.go:45] 2017-01-25T05:13:09.215372135-05:00 AUDIT: id="6304cb08-3848-413c-9be9-08bf523fc4ee" response="200" I0125 05:13:09.215447 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.429319ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:09.215618 4678 controller.go:114] Found 0 jobs I0125 05:13:09.215627 4678 controller.go:117] Found 0 groups I0125 05:13:09.261020 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:09.261047 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:09.261685 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:09.261702 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:09.262469 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc434385340 -1 [] true false map[] 0xc43534e4b0 } I0125 05:13:09.262501 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:09.262569 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc434385420 -1 [] true false map[] 0xc42f42fc20 } I0125 05:13:09.262588 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:09.684522 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:10.068227 4678 audit.go:125] 
2017-01-25T05:13:10.068170708-05:00 AUDIT: id="71875595-8d10-4155-83b7-106bfc057762" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:10.069617 4678 audit.go:45] 2017-01-25T05:13:10.069603227-05:00 AUDIT: id="71875595-8d10-4155-83b7-106bfc057762" response="200" I0125 05:13:10.069694 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.149536ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:10.547561 4678 helpers.go:101] Unable to get network stats from pid 10277: couldn't read network stats: failure opening /proc/10277/net/dev: open /proc/10277/net/dev: no such file or directory I0125 05:13:10.567090 4678 panics.go:76] GET /apis/storage.k8s.io/v1beta1/watch/storageclasses?resourceVersion=4&timeoutSeconds=375: (6m15.002256493s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:pv-binder-controller] 172.18.7.222:50846] I0125 05:13:10.567379 4678 reflector.go:392] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: Watch close - *storage.StorageClass total 0 items received I0125 05:13:10.569450 4678 audit.go:125] 2017-01-25T05:13:10.569416037-05:00 AUDIT: id="68f5d265-396f-476c-ab58-76638b68de85" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:pv-binder-controller" as="" asgroups="" namespace="" uri="/apis/storage.k8s.io/v1beta1/watch/storageclasses?resourceVersion=4&timeoutSeconds=461" I0125 05:13:10.569771 4678 audit.go:45] 2017-01-25T05:13:10.569761145-05:00 AUDIT: id="68f5d265-396f-476c-ab58-76638b68de85" response="200" I0125 05:13:10.823556 4678 helpers.go:101] Unable to get network stats from pid 10035: couldn't read network stats: failure opening /proc/10035/net/dev: open /proc/10035/net/dev: no such file or directory I0125 05:13:11.068288 4678 audit.go:125] 2017-01-25T05:13:11.068245928-05:00 AUDIT: id="6a79dc2b-8266-42b4-b6b5-a6e1de01df97" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:11.069597 4678 audit.go:45] 2017-01-25T05:13:11.069584304-05:00 AUDIT: id="6a79dc2b-8266-42b4-b6b5-a6e1de01df97" response="200" I0125 05:13:11.069673 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.03412ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:11.684614 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:11.932019 4678 audit.go:125] 2017-01-25T05:13:11.93197783-05:00 AUDIT: id="339ecf6f-bb94-48e6-b21a-1bbfbb4c3f80" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:13:11.933153 4678 audit.go:45] 2017-01-25T05:13:11.933138489-05:00 AUDIT: id="339ecf6f-bb94-48e6-b21a-1bbfbb4c3f80" response="200" I0125 05:13:11.933250 4678 panics.go:76] GET 
/apis/extensions/v1beta1/thirdpartyresources: (1.479604ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:12.068226 4678 audit.go:125] 2017-01-25T05:13:12.068176174-05:00 AUDIT: id="100cb2e7-43e1-41ef-b1be-cadd9279771a" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:12.069641 4678 audit.go:45] 2017-01-25T05:13:12.069628104-05:00 AUDIT: id="100cb2e7-43e1-41ef-b1be-cadd9279771a" response="200" I0125 05:13:12.069713 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.145792ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:12.498920 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:12.499162 4678 status_manager.go:190] Container readiness unchanged (false): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" - "docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d" I0125 05:13:12.553821 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:13:12.553848 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:13:12.554546 4678 audit.go:125] 2017-01-25T05:13:12.554515738-05:00 AUDIT: id="ead4b2b4-7435-4d5c-8889-fb20cda72d9e" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k" I0125 05:13:12.555764 4678 audit.go:45] 2017-01-25T05:13:12.55575046-05:00 AUDIT: id="ead4b2b4-7435-4d5c-8889-fb20cda72d9e" response="200" I0125 05:13:12.555862 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k: (1.540124ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:12.556639 4678 audit.go:125] 2017-01-25T05:13:12.55661668-05:00 AUDIT: id="503ec5a6-0dcf-4e25-9bc5-aec882f1856b" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k/status" I0125 05:13:12.558243 4678 audit.go:45] 2017-01-25T05:13:12.558227663-05:00 AUDIT: id="503ec5a6-0dcf-4e25-9bc5-aec882f1856b" response="200" I0125 05:13:12.558324 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k/status: (1.862295ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:12.558651 4678 status_manager.go:425] Status for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935992 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.2 StartTime:0xc429674340 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running:0xc432c8c400 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d}]} version:3 podName:postgresql-master-2-46j9k podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:13:12.558855 4678 config.go:281] Setting pods for source api I0125 05:13:12.559502 4678 replication_controller.go:378] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 
SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11155 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-2] Annotations:map[openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11158 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:}. 
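The Exec-Probe entries above show the kubelet running /bin/sh -i -c "psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'" inside the postgresql-master container and treating a clean exit as readiness; the "cannot set terminal process group" and "no job control" lines in the probe response are just sh reacting to the -i flag without a terminal, not a failure. A minimal standalone sketch of the same check (an illustration, not the kubelet's prober code; it assumes psql is on PATH and the POSTGRESQL_* variables are exported):

    package main

    import (
        "fmt"
        "os/exec"
    )

    // readyCheck runs the same query the readiness probe above uses: a trivial
    // "SELECT 1" against the local postgres, treating a clean exit as "ready".
    func readyCheck() (bool, string) {
        cmd := exec.Command("/bin/sh", "-c",
            "psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'")
        out, err := cmd.CombinedOutput()
        return err == nil, string(out)
    }

    func main() {
        ok, out := readyCheck()
        fmt.Printf("ready=%v output=%q\n", ok, out)
    }
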
I0125 05:13:12.559694 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 1->1 (need 1), fullyLabeledReplicas 1->1, readyReplicas 0->1, availableReplicas 0->1, sequence No: 2->2 I0125 05:13:12.560057 4678 replica_set.go:320] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11155 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11158 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:13:12.560154 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:12.560220 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-46j9k, ReplicaSet controller will avoid syncing I0125 05:13:12.560257 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-46j9k, job controller will avoid syncing I0125 05:13:12.560281 4678 daemoncontroller.go:332] Pod postgresql-master-2-46j9k updated. 
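The two "Pod postgresql-master-2-46j9k updated" diffs above look like label and annotation changes, but the before and after maps carry exactly the same key/value pairs; the real change is the status update that bumped ResourceVersion 11155 -> 11158. The apparent reshuffling is only print order, since Go randomizes map iteration order (and the fmt package of this era printed maps in iteration order). A tiny sketch of why the controllers see these as equal, using two of the label pairs from the dump:

    package main

    import (
        "fmt"
        "reflect"
    )

    func main() {
        // Two of the label pairs from the objectMeta dump above, built in different orders.
        before := map[string]string{"name": "postgresql-master", "deployment": "postgresql-master-2"}
        after := map[string]string{"deployment": "postgresql-master-2", "name": "postgresql-master"}

        // Iteration (and pre-Go-1.12 printing) order is not stable, but the
        // contents are identical, so a structural comparison reports equality.
        fmt.Println(reflect.DeepEqual(before, after)) // true
    }
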
I0125 05:13:12.560312 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-46j9k, daemon set controller will avoid syncing I0125 05:13:12.560339 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-46j9k" I0125 05:13:12.560359 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-46j9k, PodDisruptionBudget controller will avoid syncing. I0125 05:13:12.560365 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-46j9k" I0125 05:13:12.560677 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-46j9k, StatefulSet controller will avoid syncing I0125 05:13:12.560840 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k ready condition last transition time 2017-01-25 05:13:12 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:13:12.560829281 -0500 EST. I0125 05:13:12.562249 4678 audit.go:125] 2017-01-25T05:13:12.562191835-05:00 AUDIT: id="d8b73556-09b3-4a98-b1d6-ae6e1f10c3ff" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:13:12.562980 4678 audit.go:125] 2017-01-25T05:13:12.562942765-05:00 AUDIT: id="3d309cbb-ece5-4d5a-bada-e824b84da7ae" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:13:12.564304 4678 audit.go:125] 2017-01-25T05:13:12.564267374-05:00 AUDIT: id="9896a96f-008c-4742-8792-e8e3dfacaa38" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:12.565867 4678 audit.go:45] 2017-01-25T05:13:12.565856731-05:00 AUDIT: id="d8b73556-09b3-4a98-b1d6-ae6e1f10c3ff" response="200" I0125 05:13:12.565969 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (4.022865ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:13:12.566313 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 4) I0125 05:13:12.567049 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:13:12.567156 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k ready condition last transition time 2017-01-25 05:13:12 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:13:12.567143934 -0500 EST. 
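The repeated deployment_util.go:784 lines compare the pod's Ready condition last transition time plus minReadySeconds against "now" to decide whether the pod counts as available; with minReadySeconds 0 the pod is available as soon as it is Ready, which is why readyReplicas and availableReplicas flip 0->1 together in the replica-count update above. A minimal sketch of that predicate (a hypothetical helper, not the vendored deployment_util code):

    package main

    import (
        "fmt"
        "time"
    )

    // availableNow reports whether a pod that became Ready at readySince counts
    // as available once minReadySeconds have elapsed. With minReadySeconds == 0
    // this is true immediately.
    func availableNow(readySince time.Time, minReadySeconds int, now time.Time) bool {
        return !now.Before(readySince.Add(time.Duration(minReadySeconds) * time.Second))
    }

    func main() {
        readySince := time.Now().Add(-2 * time.Second)
        fmt.Println(availableNow(readySince, 0, time.Now()))  // true
        fmt.Println(availableNow(readySince, 30, time.Now())) // false: not ready long enough yet
    }
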
I0125 05:13:12.567263 4678 audit.go:45] 2017-01-25T05:13:12.567249819-05:00 AUDIT: id="9896a96f-008c-4742-8792-e8e3dfacaa38" response="200" I0125 05:13:12.567326 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (5.955645ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:12.567619 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 1 not ready: 0 I0125 05:13:12.568047 4678 audit.go:45] 2017-01-25T05:13:12.568032891-05:00 AUDIT: id="3d309cbb-ece5-4d5a-bada-e824b84da7ae" response="200" I0125 05:13:12.569158 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (8.670485ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:13:12.569666 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (10.013419ms) I0125 05:13:12.569830 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 1->1 I0125 05:13:12.569892 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (35.383µs) I0125 05:13:12.570050 4678 factory.go:154] Replication controller "postgresql-master-2" updated. I0125 05:13:12.570152 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k ready condition last transition time 2017-01-25 05:13:12 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:13:12.570140812 -0500 EST. 
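The endpoints_controller line above ("Update endpoints for .../postgresql-master, ready: 1 not ready: 0") comes from splitting the service's pod addresses into ready and not-ready sets before writing the Endpoints object. A rough sketch of that partition over hypothetical pod records (trimmed to the fields that matter here):

    package main

    import "fmt"

    // pod is a stripped-down stand-in for the fields the endpoints controller cares about.
    type pod struct {
        IP    string
        Ready bool
    }

    // partition splits pod IPs into ready and notReady address lists, mirroring
    // the "ready: N not ready: M" bookkeeping in the log line above.
    func partition(pods []pod) (ready, notReady []string) {
        for _, p := range pods {
            if p.Ready {
                ready = append(ready, p.IP)
            } else {
                notReady = append(notReady, p.IP)
            }
        }
        return ready, notReady
    }

    func main() {
        r, nr := partition([]pod{{IP: "172.17.0.2", Ready: true}})
        fmt.Printf("ready: %d not ready: %d -> %v %v\n", len(r), len(nr), r, nr)
    }
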
I0125 05:13:12.570225 4678 audit.go:125] 2017-01-25T05:13:12.570175242-05:00 AUDIT: id="9735bb08-e0dd-4879-a7d0-89ce02c43e13" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:12.571255 4678 audit.go:125] 2017-01-25T05:13:12.571222126-05:00 AUDIT: id="5bcefe32-b757-43d4-9f36-8e3c5ef3f739" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:13:12.572268 4678 audit.go:45] 2017-01-25T05:13:12.572252895-05:00 AUDIT: id="9735bb08-e0dd-4879-a7d0-89ce02c43e13" response="200" I0125 05:13:12.572327 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (4.312038ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:12.572769 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:13:12.572833 4678 roundrobin.go:257] LoadBalancerRR: Setting endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master: to [172.17.0.2:5432] I0125 05:13:12.572863 4678 roundrobin.go:83] LoadBalancerRR service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master:" did not exist, created I0125 05:13:12.572939 4678 proxier.go:616] Setting endpoints for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master:" to [172.17.0.2:5432] I0125 05:13:12.573003 4678 proxier.go:804] Syncing iptables rules I0125 05:13:12.573015 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:12.587735 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (27.292127ms) I0125 05:13:12.588668 4678 audit.go:45] 2017-01-25T05:13:12.588648344-05:00 AUDIT: id="5bcefe32-b757-43d4-9f36-8e3c5ef3f739" response="200" I0125 05:13:12.588920 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (17.967772ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:13:12.590361 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:13:12.590560 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 4) I0125 05:13:12.590751 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k ready condition last transition time 2017-01-25 05:13:12 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:13:12.590740229 -0500 EST. 
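After the endpoints PUT, OnEndpointsUpdate fans out to both the LoadBalancerRR bookkeeping (roundrobin.go, which creates an entry for the service and sets its endpoint list to [172.17.0.2:5432]) and the iptables proxier, whose DNAT rules do the actual forwarding in this setup. A small sketch of a round-robin endpoint picker like the LoadBalancerRR entry (illustrative of the technique only, not the origin/kube-proxy implementation):

    package main

    import (
        "fmt"
        "sync"
    )

    // rrBalancer hands out endpoints in round-robin order, the way the
    // LoadBalancerRR entry in the log tracks postgresql-master's endpoints.
    type rrBalancer struct {
        mu        sync.Mutex
        endpoints []string
        next      int
    }

    // Set replaces the endpoint list, as done when the Endpoints object changes.
    func (b *rrBalancer) Set(eps []string) {
        b.mu.Lock()
        defer b.mu.Unlock()
        b.endpoints = eps
        b.next = 0
    }

    // Next returns the next endpoint in rotation, or false if none exist.
    func (b *rrBalancer) Next() (string, bool) {
        b.mu.Lock()
        defer b.mu.Unlock()
        if len(b.endpoints) == 0 {
            return "", false
        }
        ep := b.endpoints[b.next]
        b.next = (b.next + 1) % len(b.endpoints)
        return ep, true
    }

    func main() {
        b := &rrBalancer{}
        b.Set([]string{"172.17.0.2:5432"})
        for i := 0; i < 3; i++ {
            ep, _ := b.Next()
            fmt.Println(ep)
        }
    }
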
I0125 05:13:12.591212 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:12.591242 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:12.591260 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:12.591271 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:12.591297 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master] I0125 05:13:12.591317 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:12.591326 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:12.591334 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:13:12.591344 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:13:12.602831 4678 audit.go:125] 2017-01-25T05:13:12.602765754-05:00 AUDIT: id="eb712a81-5b55-4c19-ad66-394f840562ec" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events?fieldSelector=involvedObject.kind%3DReplicationController%2CinvolvedObject.name%3Dpostgresql-master-2%2CinvolvedObject.namespace%3Dextended-test-postgresql-replication-1-34bbd-xd4g8%2CinvolvedObject.uid%3Dd712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:13:12.604240 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:12.620251 4678 panics.go:76] GET /api/v1/watch/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-master-2%2Cdeploymentconfig%3Dpostgresql-master%2Cname%3Dpostgresql-master&resourceVersion=11144&timeoutSeconds=480: (10.007686097s) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:12.623943 4678 audit.go:45] 2017-01-25T05:13:12.623920651-05:00 AUDIT: id="eb712a81-5b55-4c19-ad66-394f840562ec" response="200" I0125 05:13:12.624148 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events?fieldSelector=involvedObject.kind%3DReplicationController%2CinvolvedObject.name%3Dpostgresql-master-2%2CinvolvedObject.namespace%3Dextended-test-postgresql-replication-1-34bbd-xd4g8%2CinvolvedObject.uid%3Dd712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094: (23.714503ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:12.631547 4678 audit.go:125] 2017-01-25T05:13:12.631488642-05:00 AUDIT: id="a86e4f6b-44ef-4d53-960d-2a3362f89acd" ip="172.17.0.4" method="GET" user="system:serviceaccount:extended-test-postgresql-replication-1-34bbd-xd4g8:deployer" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events?fieldSelector=involvedObject.kind%3DReplicationController%2CinvolvedObject.name%3Dpostgresql-master-1%2CinvolvedObject.namespace%3Dextended-test-postgresql-replication-1-34bbd-xd4g8%2CinvolvedObject.uid%3Db383709d-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:13:12.634561 4678 audit.go:45] 2017-01-25T05:13:12.634544325-05:00 AUDIT: id="a86e4f6b-44ef-4d53-960d-2a3362f89acd" response="200" I0125 05:13:12.634704 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events?fieldSelector=involvedObject.kind%3DReplicationController%2CinvolvedObject.name%3Dpostgresql-master-1%2CinvolvedObject.namespace%3Dextended-test-postgresql-replication-1-34bbd-xd4g8%2CinvolvedObject.uid%3Db383709d-e2e6-11e6-a4b0-0e6a5cbf0094: (5.581998ms) 200 [[openshift-deploy/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.17.0.4:50310] I0125 05:13:12.636593 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:12.664583 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:12.684506 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:12.717139 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:12.755696 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:12.772146 4678 helpers.go:101] Unable to get network stats from pid 10709: couldn't read network stats: failure opening /proc/10709/net/dev: open /proc/10709/net/dev: no such file or directory I0125 05:13:12.785337 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:12.804782 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:12.825466 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment 
default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A 
KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:12.825504 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:12.846818 4678 proxier.go:797] syncProxyRules took 273.809236ms I0125 05:13:12.846845 4678 proxier.go:566] OnEndpointsUpdate took 273.949832ms for 6 endpoints I0125 05:13:12.846890 4678 proxier.go:381] Received update notice: [] I0125 05:13:12.846926 4678 proxier.go:804] Syncing iptables rules I0125 05:13:12.846936 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:12.869988 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:12.889234 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:12.904549 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:13:12.904955 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:12.906281 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:12.914606 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:12.938659 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:12.972265 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:12.987952 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:12.988020 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:12.988574 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, 
Port: 5000, Path: /healthz I0125 05:13:12.988590 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:12.991475 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:12 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42f335d80 0 [] true false map[] 0xc43521d590 } I0125 05:13:12.991553 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:12.991605 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:12 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42f335e40 0 [] true false map[] 0xc43521d770 } I0125 05:13:12.991651 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:13.003710 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:13.015488 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:13:13.015560 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:13.018002 4678 generic.go:145] GenericPLEG: d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28: running -> exited I0125 05:13:13.029710 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42460a9a0 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/08cf920c Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc426baf560 NetworkSettings:0xc426d6f500} I0125 05:13:13.030542 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:13.044311 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc428a50840 Mounts:[] Config:0xc42c262b40 NetworkSettings:0xc42e60ed00} I0125 05:13:13.046321 4678 generic.go:342] PLEG: Write status for postgresql-master-2-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-2-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"172.17.0.4", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc428adce00), (*container.ContainerStatus)(0xc428adcee0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:13:13.046433 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", 
Data:"3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28"} I0125 05:13:13.046527 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:13.046573 4678 helpers.go:78] Already ran container "deployment" of pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)", do nothing I0125 05:13:13.046738 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:13.047710 4678 audit.go:125] 2017-01-25T05:13:13.047670583-05:00 AUDIT: id="0937061b-37d7-4aa3-976a-e77fc9a589ba" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy" I0125 05:13:13.049486 4678 audit.go:45] 2017-01-25T05:13:13.049471671-05:00 AUDIT: id="0937061b-37d7-4aa3-976a-e77fc9a589ba" response="200" I0125 05:13:13.049602 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy: (2.255953ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:13.050716 4678 audit.go:125] 2017-01-25T05:13:13.050683491-05:00 AUDIT: id="357ea4e9-6186-4a0a-b9ae-374f7243a232" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy/status" I0125 05:13:13.053456 4678 audit.go:45] 2017-01-25T05:13:13.053440471-05:00 AUDIT: id="357ea4e9-6186-4a0a-b9ae-374f7243a232" response="200" I0125 05:13:13.053525 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy/status: (3.057533ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:13.054476 4678 replication_controller.go:378] Pod postgresql-master-2-deploy updated, objectMeta {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11117 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11163 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] 
ClusterName:}. I0125 05:13:13.054623 4678 replication_controller.go:255] No controllers found for pod postgresql-master-2-deploy, replication manager will avoid syncing I0125 05:13:13.054654 4678 replica_set.go:320] Pod postgresql-master-2-deploy updated, objectMeta {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11117 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11163 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:13:13.054743 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-deploy, ReplicaSet controller will avoid syncing I0125 05:13:13.054773 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-deploy, job controller will avoid syncing I0125 05:13:13.054796 4678 daemoncontroller.go:332] Pod postgresql-master-2-deploy updated. I0125 05:13:13.054818 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-deploy, daemon set controller will avoid syncing I0125 05:13:13.054835 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-deploy" I0125 05:13:13.054848 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-deploy, PodDisruptionBudget controller will avoid syncing. 
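The proxier sync logged earlier assembles the whole desired ruleset as text (the "*filter ... COMMIT *nat ... COMMIT" payload, with chain declarations followed by -A rules) and applies it in one call to "iptables-restore --noflush --counters" rather than issuing one iptables command per rule. A sketch of that pattern, feeding a rules string on stdin (illustrative only: it needs root on a Linux host, and the payload is a trimmed stand-in built from one rule that appears verbatim in the log, not the full ruleset):

    package main

    import (
        "fmt"
        "os/exec"
        "strings"
    )

    func main() {
        // A trimmed stand-in payload in iptables-restore format:
        // table header, chain declarations, rules, COMMIT.
        rules := strings.Join([]string{
            "*nat",
            ":KUBE-SERVICES - [0:0]",
            ":KUBE-POSTROUTING - [0:0]",
            "-A KUBE-POSTROUTING -m comment --comment \"kubernetes service traffic requiring SNAT\" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE",
            "COMMIT",
            "",
        }, "\n")

        // --noflush applies the payload without flushing whole tables first and
        // --counters preserves packet/byte counters; both flags appear in the log.
        cmd := exec.Command("iptables-restore", "--noflush", "--counters")
        cmd.Stdin = strings.NewReader(rules)
        if out, err := cmd.CombinedOutput(); err != nil {
            fmt.Printf("restore failed: %v: %s\n", err, out)
        }
    }
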
I0125 05:13:13.054853 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-deploy" I0125 05:13:13.054922 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-deploy, StatefulSet controller will avoid syncing I0125 05:13:13.056115 4678 status_manager.go:425] Status for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Succeeded Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935976 nsec:0 loc:0xa2479e0}} Reason:PodCompleted Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935993 nsec:0 loc:0xa2479e0}} Reason:PodCompleted Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935976 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.4 StartTime:0xc433575160 InitContainerStatuses:[] ContainerStatuses:[{Name:deployment State:{Waiting: Running: Terminated:0xc427607ce0} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:openshift/origin-deployer:86a9783 ImageID:docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8 ContainerID:docker://3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28}]} version:3 podName:postgresql-master-2-deploy podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:13:13.058333 4678 config.go:281] Setting pods for source api I0125 05:13:13.059368 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:13.071833 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:13.073761 4678 audit.go:125] 2017-01-25T05:13:13.073696613-05:00 AUDIT: id="a8c8c046-df07-4c65-b6e2-dc193f71007f" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:13:13.078235 4678 audit.go:125] 2017-01-25T05:13:13.078124221-05:00 AUDIT: id="02203a9f-8327-4c4d-a05f-ece1f3ff5147" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:13.078702 4678 audit.go:45] 2017-01-25T05:13:13.078688147-05:00 AUDIT: id="a8c8c046-df07-4c65-b6e2-dc193f71007f" response="200" I0125 05:13:13.081489 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (24.905186ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:13:13.081726 4678 audit.go:45] 2017-01-25T05:13:13.081710792-05:00 AUDIT: id="02203a9f-8327-4c4d-a05f-ece1f3ff5147" response="200" I0125 05:13:13.081876 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: 
(9.800605ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:13.082186 4678 controller.go:225] Updated deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 status from Running to Complete (scale: 1) I0125 05:13:13.082523 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 1->1 I0125 05:13:13.082653 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (63.489µs) I0125 05:13:13.083174 4678 factory.go:154] Replication controller "postgresql-master-2" updated. I0125 05:13:13.083356 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k ready condition last transition time 2017-01-25 05:13:12 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:13:13.083339411 -0500 EST. I0125 05:13:13.084619 4678 audit.go:125] 2017-01-25T05:13:13.084581696-05:00 AUDIT: id="7d15dffe-5320-473e-9665-55294af8116e" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:13:13.088070 4678 audit.go:45] 2017-01-25T05:13:13.0880563-05:00 AUDIT: id="7d15dffe-5320-473e-9665-55294af8116e" response="200" I0125 05:13:13.088242 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (3.978065ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:13:13.089048 4678 audit.go:125] 2017-01-25T05:13:13.089005674-05:00 AUDIT: id="df10b31c-bf1d-48e2-950d-33d79b465d5b" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:deploymentconfig-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy" I0125 05:13:13.090885 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 4) I0125 05:13:13.090916 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:13:13.091051 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k ready condition last transition time 2017-01-25 05:13:12 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:13:13.091032303 -0500 EST. 
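Once the deployment is marked Complete, the deploymentconfig-controller issues the DELETE recorded in the audit entry above against /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy. A bare-bones sketch of making that same call with only the standard library (the real controller goes through the generated client, not raw net/http; the bearer token is a placeholder, the API address is the master URL that appears in the deployer pod's env further down, and TLS verification is skipped only to keep the sketch self-contained):

    package main

    import (
        "crypto/tls"
        "fmt"
        "net/http"
    )

    func main() {
        ns := "extended-test-postgresql-replication-1-34bbd-xd4g8"
        url := "https://172.18.7.222:8443/api/v1/namespaces/" + ns + "/pods/postgresql-master-2-deploy"

        req, err := http.NewRequest(http.MethodDelete, url, nil)
        if err != nil {
            panic(err)
        }
        // The controller authenticates with its service-account bearer token;
        // "TOKEN" is a placeholder here.
        req.Header.Set("Authorization", "Bearer TOKEN")

        // A real client would trust the cluster CA instead of skipping verification.
        client := &http.Client{Transport: &http.Transport{
            TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
        }}
        resp, err := client.Do(req)
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()
        fmt.Println("status:", resp.Status)
    }
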
I0125 05:13:13.093774 4678 replication_controller.go:378] Pod postgresql-master-2-deploy updated, objectMeta {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11163 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11166 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp:2017-01-25 05:13:13.091774132 -0500 EST DeletionGracePeriodSeconds:0xc42ae86fe8 Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:13:13.093882 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:13:13.091774132 -0500 EST, labels map[openshift.io/deployer-pod-for.name:postgresql-master-2]. I0125 05:13:13.093970 4678 replication_controller.go:255] No controllers found for pod postgresql-master-2-deploy, replication manager will avoid syncing I0125 05:13:13.094006 4678 replica_set.go:320] Pod postgresql-master-2-deploy updated, objectMeta {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11163 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy UID:d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11166 Generation:0 CreationTimestamp:2017-01-25 05:12:56.024155702 -0500 EST DeletionTimestamp:2017-01-25 05:13:13.091774132 -0500 EST DeletionGracePeriodSeconds:0xc42ae86fe8 Labels:map[openshift.io/deployer-pod-for.name:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:}. 
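The "deleted through ... updatePod" lines above are not a separate delete event: the controllers receive an ordinary update whose new object now carries a DeletionTimestamp and DeletionGracePeriodSeconds (ResourceVersion 11163 -> 11166), and treat that as the deletion signal. A toy version of that check, on a hypothetical trimmed-down pod type:

    package main

    import (
        "fmt"
        "time"
    )

    // podMeta is a stand-in for the few object fields the updatePod handlers look at.
    type podMeta struct {
        Name              string
        ResourceVersion   string
        DeletionTimestamp *time.Time
    }

    // observeUpdate mirrors the handlers above: an update whose new state carries
    // a non-nil DeletionTimestamp is treated as "this pod is being deleted".
    func observeUpdate(old, cur podMeta) {
        if old.DeletionTimestamp == nil && cur.DeletionTimestamp != nil {
            fmt.Printf("Pod %s deleted through updatePod, timestamp %v\n",
                cur.Name, *cur.DeletionTimestamp)
            return
        }
        fmt.Printf("Pod %s updated (%s -> %s)\n", cur.Name, old.ResourceVersion, cur.ResourceVersion)
    }

    func main() {
        ts := time.Now()
        observeUpdate(
            podMeta{Name: "postgresql-master-2-deploy", ResourceVersion: "11163"},
            podMeta{Name: "postgresql-master-2-deploy", ResourceVersion: "11166", DeletionTimestamp: &ts},
        )
    }
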
I0125 05:13:13.094510 4678 config.go:281] Setting pods for source api I0125 05:13:13.095691 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:13.097273 4678 audit.go:125] 2017-01-25T05:13:13.097234418-05:00 AUDIT: id="f5233d0b-2378-4ee4-8907-fb4de50883c7" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy" I0125 05:13:13.094099 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:13:13.091774132 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-2-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy", UID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11166", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935976, nsec:24155702, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc435ef0040), DeletionGracePeriodSeconds:(*int64)(0xc42ae86fe8), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-master-2"}, Annotations:map[string]string{"openshift.io/scc":"restricted", "openshift.io/deployment.name":"postgresql-master-2"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc423f6c840), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", 
Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-master-2", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc423f6c900), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc42ae87090), ActiveDeadlineSeconds:(*int64)(0xc42ae87098), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc435860640), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Succeeded", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935976, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935993, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935976, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.4", StartTime:(*unversioned.Time)(0xc435ef0460), 
InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"deployment", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc4299e8b60)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"openshift/origin-deployer:86a9783", ImageID:"docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8", ContainerID:"docker://3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28"}}}}. I0125 05:13:13.097982 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-deploy, ReplicaSet controller will avoid syncing I0125 05:13:13.098023 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-deploy, job controller will avoid syncing I0125 05:13:13.098044 4678 daemoncontroller.go:332] Pod postgresql-master-2-deploy updated. I0125 05:13:13.098075 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-deploy, daemon set controller will avoid syncing I0125 05:13:13.098099 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-deploy" I0125 05:13:13.098114 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:13:13.098119 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-deploy" I0125 05:13:13.098219 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-deploy, StatefulSet controller will avoid syncing I0125 05:13:13.099115 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment 
"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A 
KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:13.099151 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:13.112447 4678 audit.go:45] 2017-01-25T05:13:13.112415404-05:00 AUDIT: id="f5233d0b-2378-4ee4-8907-fb4de50883c7" response="200" I0125 05:13:13.112625 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy: (15.668132ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:13.113024 4678 audit.go:45] 2017-01-25T05:13:13.113012559-05:00 AUDIT: id="df10b31c-bf1d-48e2-950d-33d79b465d5b" response="200" I0125 05:13:13.113151 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy: (28.552906ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:deploymentconfig-controller] 172.18.7.222:50846] I0125 05:13:13.114110 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.deletePod)-fm, timestamp 2017-01-25 05:13:13.091774132 -0500 EST, labels map[openshift.io/deployer-pod-for.name:postgresql-master-2]. 
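[editor's note] The proxier entries a few lines above end with kube-proxy handing the generated ruleset to iptables-restore (iptables.go:339] running iptables-restore [--noflush --counters]). The following is a minimal, hypothetical Go sketch of that hand-off pattern using only the standard library: the payload is written to the command's stdin and applied in one shot. The rule text and the applyRules helper are placeholders for illustration, not the vendored kube-proxy code.

package main

import (
	"bytes"
	"fmt"
	"os/exec"
)

// applyRules pipes an iptables-restore payload (like the *filter/*nat
// blocks logged by the proxier) to iptables-restore. --noflush applies
// the payload without flushing the whole table first, and --counters
// restores the [pkt:byte] counters included in the chain declarations,
// matching the flags seen in the log.
func applyRules(rules []byte) error {
	cmd := exec.Command("iptables-restore", "--noflush", "--counters")
	cmd.Stdin = bytes.NewReader(rules)
	out, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("iptables-restore failed: %v: %s", err, out)
	}
	return nil
}

func main() {
	// Tiny placeholder payload; the real one is the multi-chain block above.
	rules := []byte("*filter\n:KUBE-SERVICES - [0:0]\nCOMMIT\n")
	if err := applyRules(rules); err != nil {
		fmt.Println(err) // requires root and the iptables tools on the host
	}
}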
I0125 05:13:13.114242 4678 replication_controller.go:255] No controllers found for pod postgresql-master-2-deploy, replication manager will avoid syncing I0125 05:13:13.114271 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.deletePod)-fm, timestamp 2017-01-25 05:13:13.091774132 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-2-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy", UID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11167", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935976, nsec:24155702, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42f94e320), DeletionGracePeriodSeconds:(*int64)(0xc42c3ba238), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-master-2"}, Annotations:map[string]string{"openshift.io/deployment.name":"postgresql-master-2", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42b6259b0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN 
CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-master-2", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42b625aa0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc42c3ba2e0), ActiveDeadlineSeconds:(*int64)(0xc42c3ba2e8), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc429d99100), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Succeeded", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935976, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935993, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935976, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.4", StartTime:(*unversioned.Time)(0xc42f94e560), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"deployment", 
State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc435350540)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"openshift/origin-deployer:86a9783", ImageID:"docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8", ContainerID:"docker://3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28"}}}}. I0125 05:13:13.114573 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-deploy, ReplicaSet controller will avoid syncing I0125 05:13:13.114609 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-deploy, job controller will avoid syncing I0125 05:13:13.114638 4678 daemoncontroller.go:367] Pod postgresql-master-2-deploy deleted. I0125 05:13:13.114664 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-deploy, daemon set controller will avoid syncing I0125 05:13:13.114683 4678 disruption.go:355] deletePod called on pod "postgresql-master-2-deploy" I0125 05:13:13.114697 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:13:13.114702 4678 disruption.go:358] No matching pdb for pod "postgresql-master-2-deploy" I0125 05:13:13.114775 4678 pet_set.go:210] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.(*StatefulSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.deletePod)-fm. 
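[editor's note] Every workload controller above (ReplicaSet, Job, DaemonSet, PodDisruptionBudget, StatefulSet) reports that nothing selects the deployer pod, whose only label is openshift.io/deployer-pod-for.name=postgresql-master-2, so each controller skips a sync. The sketch below is a deliberately simplified, hypothetical version of that decision: a selector matches a pod only if every selector key/value appears in the pod's labels. It stands in for the real selector machinery and is illustrative only.

package main

import "fmt"

// matchesSelector reports whether every key/value in selector is present
// in the pod's labels. An empty selector is treated as matching nothing
// here, just to keep this toy example conservative.
func matchesSelector(selector, podLabels map[string]string) bool {
	if len(selector) == 0 {
		return false
	}
	for k, v := range selector {
		if podLabels[k] != v {
			return false
		}
	}
	return true
}

func main() {
	podLabels := map[string]string{
		"openshift.io/deployer-pod-for.name": "postgresql-master-2",
	}
	rsSelector := map[string]string{"deployment": "postgresql-slave-1"}
	// Prints false: no such selector matches the deployer pod, so the
	// controller "will avoid syncing", as the log states.
	fmt.Println(matchesSelector(rsSelector, podLabels))
}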
I0125 05:13:13.114801 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-deploy, StatefulSet controller will avoid syncing I0125 05:13:13.115849 4678 config.go:281] Setting pods for source api I0125 05:13:13.117087 4678 kubelet.go:1791] SyncLoop (REMOVE, "api"): "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:13.117158 4678 kubelet_pods.go:785] Killing unwanted pod "postgresql-master-2-deploy" I0125 05:13:13.117233 4678 docker_manager.go:1536] Killing container "3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28 deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy" with 0 second grace period I0125 05:13:13.118012 4678 audit.go:125] 2017-01-25T05:13:13.117972189-05:00 AUDIT: id="f6f28b47-b1a5-4828-8ad9-6b743ca65cf8" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy/status" I0125 05:13:13.119563 4678 audit.go:45] 2017-01-25T05:13:13.119547279-05:00 AUDIT: id="f6f28b47-b1a5-4828-8ad9-6b743ca65cf8" response="409" I0125 05:13:13.119644 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-deploy/status: (1.915313ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] W0125 05:13:13.119952 4678 status_manager.go:451] Failed to update status for pod "_()": Operation cannot be fulfilled on pods "postgresql-master-2-deploy": StorageError: invalid object, Code: 4, Key: kubernetes.io/pods/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094, UID in object meta: I0125 05:13:13.126557 4678 proxier.go:797] syncProxyRules took 279.622342ms I0125 05:13:13.126590 4678 proxier.go:431] OnServiceUpdate took 279.686718ms for 4 services I0125 05:13:13.127134 4678 docker_manager.go:1577] Container "3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28 deployment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy" exited after 9.882831ms I0125 05:13:13.127627 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-2-deploy", UID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11103", FieldPath:"spec.containers{deployment}"}): type: 'Normal' reason: 'Killing' Killing container with docker id 3d4f90b50c8d: Need to kill pod. 
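[editor's note] The PUT to .../pods/postgresql-master-2-deploy/status above returns 409 because the status manager's cached update still carries UID d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 while the precondition error reports an empty UID in object meta, which appears to indicate the pod object was already deleted by the time the write landed, so the stale status is refused rather than applied. The fragment below is a hypothetical guard for that pattern, built on plain placeholder types rather than the real API machinery.

package main

import (
	"errors"
	"fmt"
)

// cachedStatus is a stand-in for a status update queued by a kubelet-like
// component; UID pins the update to one specific incarnation of the pod.
type cachedStatus struct {
	Name  string
	UID   string
	Phase string
}

var errConflict = errors.New("conflict: UID precondition failed")

// applyStatus only applies the update when the live object still has the
// expected UID; an empty live UID means the object is gone and the cached
// update should be dropped, mirroring the 409 seen in the log.
func applyStatus(liveUID string, s cachedStatus) error {
	if liveUID == "" || liveUID != s.UID {
		return errConflict
	}
	fmt.Printf("updated %s to phase %s\n", s.Name, s.Phase)
	return nil
}

func main() {
	s := cachedStatus{Name: "postgresql-master-2-deploy", UID: "d71573a5", Phase: "Succeeded"}
	if err := applyStatus("", s); err != nil {
		fmt.Println("dropping stale status update:", err)
	}
}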
I0125 05:13:13.128253 4678 audit.go:125] 2017-01-25T05:13:13.12819464-05:00 AUDIT: id="e7f02b34-b306-48d6-bc3e-16302dcb4aa3" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:13:13.131030 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:13:13.131135 4678 audit.go:45] 2017-01-25T05:13:13.131114294-05:00 AUDIT: id="e7f02b34-b306-48d6-bc3e-16302dcb4aa3" response="201" I0125 05:13:13.131230 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.2966ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:13.132902 4678 docker_manager.go:1536] Killing container "2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy" with 0 second grace period I0125 05:13:13.142024 4678 secret.go:179] Setting up volume deployer-token-r7jj8 for pod d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:13.142983 4678 audit.go:125] 2017-01-25T05:13:13.142937844-05:00 AUDIT: id="7b055e38-b3ef-48b7-a2ec-b84ac25860ac" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:13:13.144417 4678 audit.go:45] 2017-01-25T05:13:13.14439949-05:00 AUDIT: id="7b055e38-b3ef-48b7-a2ec-b84ac25860ac" response="200" I0125 05:13:13.144645 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (1.979472ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:13.144936 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/deployer-token-r7jj8 containing (4) pieces of data, 4270 total bytes I0125 05:13:13.145192 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy volume deployer-token-r7jj8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:13.145355 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094"). 
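[editor's note] atomic_writer reports "no update required for target directory" for the deployer-token-r7jj8 volume above: the projected secret payload already matches what is on disk, so nothing is rewritten. The sketch below shows that compare-before-write idea with the standard library only; writing to a temporary file and renaming it into place is one common way to keep the final write atomic. The file names and the writeIfChanged helper are illustrative, not the kubelet's actual implementation.

package main

import (
	"bytes"
	"fmt"
	"os"
	"path/filepath"
)

// writeIfChanged skips the write when the target already holds payload;
// otherwise it writes a temp file in the same directory and renames it
// into place so readers never observe a partially written file.
func writeIfChanged(path string, payload []byte) (changed bool, err error) {
	if existing, err := os.ReadFile(path); err == nil && bytes.Equal(existing, payload) {
		return false, nil // "no update required"
	}
	tmp := path + ".tmp"
	if err := os.WriteFile(tmp, payload, 0600); err != nil {
		return false, err
	}
	return true, os.Rename(tmp, path)
}

func main() {
	dir, _ := os.MkdirTemp("", "secret-vol")
	defer os.RemoveAll(dir)
	target := filepath.Join(dir, "token")

	changed, _ := writeIfChanged(target, []byte("sa-token"))
	fmt.Println("first write changed:", changed) // true
	changed, _ = writeIfChanged(target, []byte("sa-token"))
	fmt.Println("second write changed:", changed) // false, no update required
}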
I0125 05:13:13.340484 4678 helpers.go:101] Unable to get network stats from pid 11170: couldn't read network stats: failure opening /proc/11170/net/dev: open /proc/11170/net/dev: no such file or directory I0125 05:13:13.346931 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:13.347609 4678 audit.go:125] 2017-01-25T05:13:13.347571939-05:00 AUDIT: id="c063d933-21f4-4ebf-af64-7632f3f09fa0" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:13:13.348740 4678 audit.go:45] 2017-01-25T05:13:13.348730251-05:00 AUDIT: id="c063d933-21f4-4ebf-af64-7632f3f09fa0" response="200" I0125 05:13:13.348941 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (1.576588ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:13.349071 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:13.349119 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:13.349129 4678 helpers.go:78] Already ran container "deployment" of pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)", do nothing I0125 05:13:13.349143 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[]} I0125 05:13:13.349171 4678 docker_manager.go:2093] Killing Infra Container for "postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094)" because all other containers are dead. 
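[editor's note] The recurring "Unable to get network stats from pid NNNN" warnings come from the stats collection path reading /proc/<pid>/net/dev for a process that has already exited, so the open fails with "no such file or directory" and that sample is simply skipped. A small Go sketch of the read-and-tolerate pattern, with the pid value taken from the log line purely as a placeholder, is below.

package main

import (
	"fmt"
	"os"
)

// readNetDev returns the raw contents of /proc/<pid>/net/dev. When the
// process has exited the path no longer exists; callers treat that as
// "no stats this interval" rather than an error worth failing on.
func readNetDev(pid int) ([]byte, error) {
	return os.ReadFile(fmt.Sprintf("/proc/%d/net/dev", pid))
}

func main() {
	// 11170 is just the pid from the log line; on most systems it is long gone.
	data, err := readNetDev(11170)
	if os.IsNotExist(err) {
		fmt.Println("process exited, skipping network stats")
		return
	} else if err != nil {
		fmt.Println("couldn't read network stats:", err)
		return
	}
	fmt.Printf("read %d bytes of interface counters\n", len(data))
}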
I0125 05:13:13.357709 4678 audit.go:125] 2017-01-25T05:13:13.357677859-05:00 AUDIT: id="59ff06dd-6c25-482d-bc4d-102199376ee5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:13.358119 4678 audit.go:45] 2017-01-25T05:13:13.358110358-05:00 AUDIT: id="59ff06dd-6c25-482d-bc4d-102199376ee5" response="200" I0125 05:13:13.358419 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (944.77µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:13.393981 4678 docker_manager.go:1577] Container "2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy" exited after 261.042055ms I0125 05:13:13.394718 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:13:13.396389 4678 docker_manager.go:1536] Killing container "2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy" with 10 second grace period I0125 05:13:13.397985 4678 docker_manager.go:1577] Container "2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy" exited after 1.576553ms W0125 05:13:13.398011 4678 docker_manager.go:1583] No ref for pod '"2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-deploy"' I0125 05:13:13.684626 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:13.690620 4678 kubelet_volumes.go:104] Orphaned pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" found, but volumes are not cleaned up I0125 05:13:14.007987 4678 gc_controller.go:175] GC'ing orphaned I0125 05:13:14.008010 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. 
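[editor's note] Housekeeping above first reports the orphaned pod "found, but volumes are not cleaned up", and roughly two seconds later, after the secret volume has been unmounted, "found, removing". The sketch below illustrates that ordering with plain filesystem checks: a pod directory is only removed once its volumes subdirectory is empty. The layout mirrors the /mnt/openshift-xfs-vol-dir/pods/<uid>/volumes/... paths in the log, but the helper itself is hypothetical.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cleanupOrphanedPodDir removes podDir only when podDir/volumes has no
// entries left; while volumes are still mounted it leaves everything in
// place, matching the "volumes are not cleaned up" log message.
func cleanupOrphanedPodDir(podDir string) (removed bool, err error) {
	entries, err := os.ReadDir(filepath.Join(podDir, "volumes"))
	if err != nil && !os.IsNotExist(err) {
		return false, err
	}
	if len(entries) > 0 {
		return false, nil // still has mounted volumes, retry next pass
	}
	return true, os.RemoveAll(podDir)
}

func main() {
	podDir, _ := os.MkdirTemp("", "pod-d71573a5")
	os.MkdirAll(filepath.Join(podDir, "volumes", "kubernetes.io~secret"), 0755)

	removed, _ := cleanupOrphanedPodDir(podDir)
	fmt.Println("removed while volume present:", removed) // false

	os.RemoveAll(filepath.Join(podDir, "volumes", "kubernetes.io~secret"))
	removed, _ = cleanupOrphanedPodDir(podDir)
	fmt.Println("removed after unmount:", removed) // true
}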
I0125 05:13:14.051626 4678 generic.go:145] GenericPLEG: d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d: running -> exited I0125 05:13:14.058007 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc429d9ef20 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/08cf920c Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42c9e70e0 NetworkSettings:0xc425b66200} I0125 05:13:14.061617 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc429d9f340 Mounts:[] Config:0xc42c9e7440 NetworkSettings:0xc425b66300} I0125 05:13:14.063134 4678 generic.go:342] PLEG: Write status for postgresql-master-2-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-2-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc424949ea0), (*container.ContainerStatus)(0xc4261062a0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:13:14.063209 4678 kubelet.go:1820] SyncLoop (PLEG): ignore irrelevant event: &pleg.PodLifecycleEvent{ID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d"} I0125 05:13:14.068187 4678 audit.go:125] 2017-01-25T05:13:14.068147227-05:00 AUDIT: id="a909efdf-4ae9-4006-ae11-2f2c7bd6ce18" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:14.069488 4678 audit.go:45] 2017-01-25T05:13:14.069473932-05:00 AUDIT: id="a909efdf-4ae9-4006-ae11-2f2c7bd6ce18" response="200" I0125 05:13:14.069573 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.064699ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:14.213331 4678 helpers.go:101] Unable to get network stats from pid 12449: couldn't read network stats: failure opening /proc/12449/net/dev: open /proc/12449/net/dev: no such file or directory I0125 05:13:14.653397 4678 helpers.go:101] Unable to get network stats from pid 12340: couldn't read network stats: failure opening /proc/12340/net/dev: open /proc/12340/net/dev: no such file or directory I0125 05:13:15.069142 4678 audit.go:125] 2017-01-25T05:13:15.06908982-05:00 AUDIT: id="7221714c-599f-4168-9c24-fe5c0e4dd3be" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 
05:13:15.070630 4678 audit.go:45] 2017-01-25T05:13:15.070618659-05:00 AUDIT: id="7221714c-599f-4168-9c24-fe5c0e4dd3be" response="200" I0125 05:13:15.070712 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (4.009304ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:15.246066 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/secret/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (spec.Name: "deployer-token-r7jj8") from pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:15.246174 4678 util.go:340] Tearing down volume deployer-token-r7jj8 for pod d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:15.246232 4678 mount_linux.go:147] Unmounting /mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 I0125 05:13:15.258777 4678 audit.go:125] 2017-01-25T05:13:15.258721078-05:00 AUDIT: id="a58b4c09-78e4-4bad-a0cf-9582696864a6" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:13:15.260328 4678 audit.go:45] 2017-01-25T05:13:15.260312678-05:00 AUDIT: id="a58b4c09-78e4-4bad-a0cf-9582696864a6" response="200" I0125 05:13:15.260444 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (2.045753ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:15.270887 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094-deployer-token-r7jj8" (OuterVolumeSpecName: "deployer-token-r7jj8") pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "deployer-token-r7jj8". PluginName "kubernetes.io/secret", VolumeGidValue "" I0125 05:13:15.684628 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:15.690710 4678 kubelet_volumes.go:113] Orphaned pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" found, removing I0125 05:13:16.068295 4678 audit.go:125] 2017-01-25T05:13:16.068256366-05:00 AUDIT: id="17a46462-2469-4a28-a85f-45833813f65e" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:16.069620 4678 audit.go:45] 2017-01-25T05:13:16.069608257-05:00 AUDIT: id="17a46462-2469-4a28-a85f-45833813f65e" response="200" I0125 05:13:16.069697 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.103013ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:16.882052 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:13:16.882077 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:13:16.895083 4678 helpers.go:101] Unable to get network stats from pid 11219: couldn't read network stats: failure opening /proc/11219/net/dev: open /proc/11219/net/dev: no such file or directory I0125 05:13:17.000646 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-master-2-deploy I0125 05:13:17.000702 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-slave-1-deploy I0125 05:13:17.000724 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-master-1-6jfgj I0125 05:13:17.000798 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:13:17.068238 4678 audit.go:125] 2017-01-25T05:13:17.068180045-05:00 AUDIT: id="b317667d-002f-47f4-8712-b4f6a1421eaa" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:17.069619 4678 audit.go:45] 2017-01-25T05:13:17.069607192-05:00 AUDIT: id="b317667d-002f-47f4-8712-b4f6a1421eaa" response="200" I0125 05:13:17.069697 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.135267ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:17.642913 4678 audit.go:125] 2017-01-25T05:13:17.642870227-05:00 AUDIT: id="a63993bf-6441-47f8-9ec7-8e635537d18d" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:13:17.643349 4678 audit.go:45] 2017-01-25T05:13:17.643339583-05:00 AUDIT: id="a63993bf-6441-47f8-9ec7-8e635537d18d" response="200" I0125 05:13:17.643684 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.019262ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:17.684528 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:17.712761 4678 audit.go:125] 2017-01-25T05:13:17.712729698-05:00 AUDIT: id="f8f0299a-a7ab-48c2-9e2c-b4cf32b9a34d" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:13:17.714831 4678 audit.go:45] 2017-01-25T05:13:17.714815306-05:00 AUDIT: id="f8f0299a-a7ab-48c2-9e2c-b4cf32b9a34d" response="200" I0125 05:13:17.715113 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.586398ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:17.716000 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:13:18.068198 4678 audit.go:125] 2017-01-25T05:13:18.068151446-05:00 AUDIT: id="32f6589c-8acf-4ab3-b7c8-f4577118291a" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:18.069539 4678 audit.go:45] 
2017-01-25T05:13:18.069521864-05:00 AUDIT: id="32f6589c-8acf-4ab3-b7c8-f4577118291a" response="200" I0125 05:13:18.069617 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.05263ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:18.359780 4678 audit.go:125] 2017-01-25T05:13:18.359744586-05:00 AUDIT: id="833307d8-5b5f-4d20-9fbe-540b27c7b0be" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:18.360246 4678 audit.go:45] 2017-01-25T05:13:18.360232577-05:00 AUDIT: id="833307d8-5b5f-4d20-9fbe-540b27c7b0be" response="200" I0125 05:13:18.360595 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.075121ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:18.360885 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:13:19.068181 4678 audit.go:125] 2017-01-25T05:13:19.068139422-05:00 AUDIT: id="ffa4d057-7393-4ae5-bb75-ec8449dcc78b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:19.069450 4678 audit.go:45] 2017-01-25T05:13:19.069437224-05:00 AUDIT: id="ffa4d057-7393-4ae5-bb75-ec8449dcc78b" response="200" I0125 05:13:19.069537 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.946731ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:19.132160 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:13:19.132229 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:13:19.132262 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:19.132279 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:19.132286 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:13:19.132296 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:19.132306 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:19.132381 4678 
pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:19.132388 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:13:19.132393 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:13:19.132398 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:19.132445 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:19.132450 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:13:19.132482 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:13:19.132491 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:19.132510 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:19.132520 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:19.132815 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:13:19.132862 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:13:19.132880 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:19.132885 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:13:19.132899 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:19.132907 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:13:19.132911 4678 pv_controller.go:643] updating 
PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:13:19.132916 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:13:19.132816 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:13:19.218401 4678 audit.go:125] 2017-01-25T05:13:19.218354711-05:00 AUDIT: id="08e61ac8-819d-4d5e-b290-8c3bf7199ece" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:13:19.219295 4678 audit.go:45] 2017-01-25T05:13:19.2192809-05:00 AUDIT: id="08e61ac8-819d-4d5e-b290-8c3bf7199ece" response="200" I0125 05:13:19.219368 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (3.018205ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:19.219610 4678 controller.go:106] Found 0 cronjobs I0125 05:13:19.221178 4678 audit.go:125] 2017-01-25T05:13:19.221156672-05:00 AUDIT: id="9e04991f-2150-4784-aca6-88bf49aa085a" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:13:19.221889 4678 audit.go:45] 2017-01-25T05:13:19.221879437-05:00 AUDIT: id="9e04991f-2150-4784-aca6-88bf49aa085a" response="200" I0125 05:13:19.221941 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.125576ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:19.222103 4678 controller.go:114] Found 0 jobs I0125 05:13:19.222111 4678 controller.go:117] Found 0 groups I0125 05:13:19.261000 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:19.261027 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:19.261684 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:19.261702 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:19.262022 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42d524740 -1 [] true false map[] 0xc42648a5a0 } I0125 05:13:19.262082 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:19.262277 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Connection:[close] Content-Type:[text/html] Cache-Control:[no-cache]] 0xc42d524820 -1 [] true false map[] 0xc42d33ed20 } I0125 05:13:19.262300 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:19.684619 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:13:19.684668 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:19.684771 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:19.684970 4678 status_manager.go:312] Ignoring same status for pod 
"postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:25 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.8 StartTime:2017-01-25 05:12:02 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql State:{Waiting: Running:0xc42ea381e0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ImageID:docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ContainerID:docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208}]} I0125 05:13:19.685097 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:19.754889 4678 secret.go:179] Setting up volume default-token-0g2nw for pod b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:19.755801 4678 audit.go:125] 2017-01-25T05:13:19.75575666-05:00 AUDIT: id="8a78d3da-6a71-40ab-b426-e836a0e8daf9" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:13:19.757211 4678 audit.go:45] 2017-01-25T05:13:19.757180123-05:00 AUDIT: id="8a78d3da-6a71-40ab-b426-e836a0e8daf9" response="200" I0125 05:13:19.757459 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (1.973609ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:19.757718 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:13:19.757947 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:13:19.758095 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:13:19.985300 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:19.985974 4678 audit.go:125] 2017-01-25T05:13:19.985932947-05:00 AUDIT: id="fc0e4f1b-8265-4358-91ec-2bf0b659e6eb" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:13:19.987112 4678 audit.go:45] 2017-01-25T05:13:19.987099325-05:00 AUDIT: id="fc0e4f1b-8265-4358-91ec-2bf0b659e6eb" response="200" I0125 05:13:19.987333 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (1.616008ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:19.987547 4678 docker_manager.go:1938] Found pod infra container for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:19.987601 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:19.987612 4678 docker_manager.go:1999] pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql" exists as 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208 I0125 05:13:19.987738 4678 docker_manager.go:2086] Got container changes for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017:-1 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208:0]} I0125 05:13:20.068135 4678 audit.go:125] 2017-01-25T05:13:20.068087372-05:00 AUDIT: id="2720cbf5-a868-40af-ae6f-3878a02d9661" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:20.069598 4678 audit.go:45] 2017-01-25T05:13:20.069587589-05:00 AUDIT: id="2720cbf5-a868-40af-ae6f-3878a02d9661" response="200" I0125 05:13:20.069674 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.134783ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:21.067998 4678 audit.go:125] 2017-01-25T05:13:21.067954723-05:00 AUDIT: id="71c65381-ad2d-4bf6-8e92-ab81bfe55d0f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:21.069168 4678 audit.go:45] 2017-01-25T05:13:21.06915657-05:00 AUDIT: 
id="71c65381-ad2d-4bf6-8e92-ab81bfe55d0f" response="200" I0125 05:13:21.069257 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.725814ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:21.684609 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:21.934340 4678 audit.go:125] 2017-01-25T05:13:21.934307252-05:00 AUDIT: id="19ba931a-1343-48b5-b75a-c73dd3f3abae" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:13:21.935394 4678 audit.go:45] 2017-01-25T05:13:21.935376265-05:00 AUDIT: id="19ba931a-1343-48b5-b75a-c73dd3f3abae" response="200" I0125 05:13:21.935483 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.410471ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:22.068144 4678 audit.go:125] 2017-01-25T05:13:22.068101029-05:00 AUDIT: id="5050210a-119f-45ee-9e5b-5b066ec3835b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:22.069525 4678 audit.go:45] 2017-01-25T05:13:22.069513148-05:00 AUDIT: id="5050210a-119f-45ee-9e5b-5b066ec3835b" response="200" I0125 05:13:22.069604 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.091349ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:22.498888 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:22.550544 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:13:22.550588 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:13:22.904445 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:13:22.904940 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:22.906267 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:22.956562 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:13:22.956593 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:22.967155 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:22.967181 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:22.967928 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:22.967946 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:22.968827 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:22 GMT]] 0xc4347cd7a0 0 [] true false map[] 0xc435167e00 } I0125 05:13:22.968879 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:22.968977 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:22 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc4347cd8c0 0 [] true false map[] 0xc42adc53b0 } I0125 05:13:22.969012 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:23.068278 4678 audit.go:125] 2017-01-25T05:13:23.068218175-05:00 AUDIT: id="03fae8e8-9a3c-449a-91e7-bf42ce2c9ce1" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:23.069707 4678 audit.go:45] 2017-01-25T05:13:23.069693395-05:00 AUDIT: id="03fae8e8-9a3c-449a-91e7-bf42ce2c9ce1" response="200" I0125 05:13:23.069784 4678 panics.go:76] 
GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.220658ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:23.091564 4678 helpers.go:101] Unable to get network stats from pid 10368: couldn't read network stats: failure opening /proc/10368/net/dev: open /proc/10368/net/dev: no such file or directory I0125 05:13:23.361828 4678 audit.go:125] 2017-01-25T05:13:23.361790753-05:00 AUDIT: id="0eca886a-96cc-4ac1-8cf1-7196d4827b34" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:23.362255 4678 audit.go:45] 2017-01-25T05:13:23.362245393-05:00 AUDIT: id="0eca886a-96cc-4ac1-8cf1-7196d4827b34" response="200" I0125 05:13:23.362665 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.082069ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:23.684615 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:23.763118 4678 helpers.go:101] Unable to get network stats from pid 10035: couldn't read network stats: failure opening /proc/10035/net/dev: open /proc/10035/net/dev: no such file or directory I0125 05:13:24.068155 4678 audit.go:125] 2017-01-25T05:13:24.068112585-05:00 AUDIT: id="470d729c-67bc-4384-abfc-f3593b8928a0" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:24.069502 4678 audit.go:45] 2017-01-25T05:13:24.069489715-05:00 AUDIT: id="470d729c-67bc-4384-abfc-f3593b8928a0" response="200" I0125 05:13:24.069582 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.038288ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:25.068003 4678 audit.go:125] 2017-01-25T05:13:25.067961683-05:00 AUDIT: id="2a2c7817-543e-435a-a90b-27d407e8e871" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:25.069173 4678 audit.go:45] 2017-01-25T05:13:25.069158364-05:00 AUDIT: id="2a2c7817-543e-435a-a90b-27d407e8e871" response="200" I0125 05:13:25.069260 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.735358ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:25.266401 4678 audit.go:125] 2017-01-25T05:13:25.266361835-05:00 AUDIT: id="4c4f1b97-bd8a-444d-b047-86d04747d6dc" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:13:25.267458 4678 audit.go:45] 2017-01-25T05:13:25.267447661-05:00 AUDIT: id="4c4f1b97-bd8a-444d-b047-86d04747d6dc" response="200" I0125 05:13:25.267535 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.383703ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 
172.18.7.222:50794] I0125 05:13:25.684631 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:26.068128 4678 audit.go:125] 2017-01-25T05:13:26.068082237-05:00 AUDIT: id="b75313be-6aac-4a5b-b880-00c1503c543d" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:26.069528 4678 audit.go:45] 2017-01-25T05:13:26.069516808-05:00 AUDIT: id="b75313be-6aac-4a5b-b880-00c1503c543d" response="200" I0125 05:13:26.069608 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.064145ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:26.160753 4678 helpers.go:101] Unable to get network stats from pid 10277: couldn't read network stats: failure opening /proc/10277/net/dev: open /proc/10277/net/dev: no such file or directory I0125 05:13:26.705827 4678 helpers.go:101] Unable to get network stats from pid 10709: couldn't read network stats: failure opening /proc/10709/net/dev: open /proc/10709/net/dev: no such file or directory I0125 05:13:27.005942 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:13:27.005960 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:13:27.068722 4678 audit.go:125] 2017-01-25T05:13:27.068665769-05:00 AUDIT: id="3ae1f6e8-8fff-4a93-ab71-7d6e58c539a4" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:27.070414 4678 audit.go:45] 2017-01-25T05:13:27.070398112-05:00 AUDIT: id="3ae1f6e8-8fff-4a93-ab71-7d6e58c539a4" response="200" I0125 05:13:27.070505 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.738312ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:27.128773 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-slave-1-deploy I0125 05:13:27.128842 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-master-2-deploy I0125 05:13:27.128866 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-master-1-6jfgj I0125 05:13:27.128914 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:13:27.684647 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094) I0125 05:13:27.684715 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:27.684887 4678 kubelet_pods.go:1029] Generating status for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:27.685053 4678 status_manager.go:312] Ignoring same status for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized 
Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:22 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:42 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:22 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.3 StartTime:2017-01-25 03:40:22 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:registry State:{Waiting: Running:0xc4269a4ba0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-docker-registry:86a9783 ImageID:docker://sha256:3ec55bd72e2d99d049485e7f0556140392c415053ffba63b99bdeca83d4e5b7f ContainerID:docker://b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b}]} I0125 05:13:27.685186 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:27.716628 4678 audit.go:125] 2017-01-25T05:13:27.716588362-05:00 AUDIT: id="b10a8a37-77bd-48d6-8ca2-75f965c8b435" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:13:27.717028 4678 audit.go:45] 2017-01-25T05:13:27.717018356-05:00 AUDIT: id="b10a8a37-77bd-48d6-8ca2-75f965c8b435" response="200" I0125 05:13:27.717366 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (987.064µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:27.772640 4678 secret.go:179] Setting up volume registry-token-vjbst for pod e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/registry-token-vjbst I0125 05:13:27.773524 4678 audit.go:125] 2017-01-25T05:13:27.773483686-05:00 AUDIT: id="e20d5da1-e6f4-4cf5-97d7-95f16d7f6918" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/registry-token-vjbst" I0125 05:13:27.774895 4678 audit.go:45] 2017-01-25T05:13:27.774878773-05:00 AUDIT: id="e20d5da1-e6f4-4cf5-97d7-95f16d7f6918" response="200" I0125 05:13:27.775112 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/registry-token-vjbst: (1.923674ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:27.775350 4678 secret.go:206] Received secret default/registry-token-vjbst containing (4) pieces of data, 4113 total bytes I0125 05:13:27.776054 4678 atomic_writer.go:142] pod default/docker-registry-1-xppm3 volume registry-token-vjbst: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/registry-token-vjbst I0125 05:13:27.776230 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094-registry-token-vjbst" (spec.Name: "registry-token-vjbst") pod "e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094" (UID: "e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094"). 
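The secret.go, atomic_writer and MountVolume.SetUp entries just above are the kubelet re-projecting the registry service-account token secret into the docker-registry pod's volume directory. As a rough, hypothetical sketch (not the manifest this test actually uses), the pod-spec fragment that drives those messages looks like the following, built with the k8s.io/api/core/v1 types; the volume and secret name are taken from the log, while the in-container mount path is only assumed from the usual service-account convention.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Secret-backed volume; the kubelet fetches the secret over the API
	// (the GET /api/v1/namespaces/default/secrets/registry-token-vjbst audit
	// line above) and writes its keys under the pod's volumes directory.
	vol := corev1.Volume{
		Name: "registry-token-vjbst",
		VolumeSource: corev1.VolumeSource{
			Secret: &corev1.SecretVolumeSource{SecretName: "registry-token-vjbst"},
		},
	}

	// Read-only mount into the registry container; this path is an assumption.
	mount := corev1.VolumeMount{
		Name:      "registry-token-vjbst",
		ReadOnly:  true,
		MountPath: "/var/run/secrets/kubernetes.io/serviceaccount",
	}

	fmt.Printf("volume: %+v\nmount:  %+v\n", vol, mount)
}

When the bytes already on disk match the fetched secret, atomic_writer reports "no update required", which is the steady-state case logged here.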
I0125 05:13:27.785503 4678 audit.go:125] 2017-01-25T05:13:27.785473237-05:00 AUDIT: id="8ee91cc1-0ae3-4726-993b-f4b0a3328025" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:13:27.787654 4678 audit.go:45] 2017-01-25T05:13:27.787639674-05:00 AUDIT: id="8ee91cc1-0ae3-4726-993b-f4b0a3328025" response="200" I0125 05:13:27.787937 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.671524ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:27.788813 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:13:27.985442 4678 volume_manager.go:365] All volumes are attached and mounted for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:27.986067 4678 audit.go:125] 2017-01-25T05:13:27.986036212-05:00 AUDIT: id="5345f2ae-5582-4882-b68b-ed78cca01601" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/registry-dockercfg-k7v0c" I0125 05:13:27.987231 4678 audit.go:45] 2017-01-25T05:13:27.987219825-05:00 AUDIT: id="5345f2ae-5582-4882-b68b-ed78cca01601" response="200" I0125 05:13:27.987450 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/registry-dockercfg-k7v0c: (1.61212ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:27.987596 4678 docker_manager.go:1938] Found pod infra container for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:27.987647 4678 docker_manager.go:1951] Pod infra container looks good, keep it "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:27.987656 4678 docker_manager.go:1999] pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" container "registry" exists as b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b I0125 05:13:27.987780 4678 docker_manager.go:2086] Got container changes for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:f3e8c4070605ec0a325ddc790afad13d1f5e410da6899802c43cad4aaaabef59 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[f3e8c4070605ec0a325ddc790afad13d1f5e410da6899802c43cad4aaaabef59:-1 b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b:0]} I0125 05:13:28.068276 4678 audit.go:125] 2017-01-25T05:13:28.068229517-05:00 AUDIT: id="6bc9e9ac-83c5-4ec2-aebd-45b783a4eac0" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:28.069680 4678 audit.go:45] 2017-01-25T05:13:28.069666576-05:00 AUDIT: id="6bc9e9ac-83c5-4ec2-aebd-45b783a4eac0" response="200" I0125 05:13:28.069753 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.20394ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:28.187100 4678 reflector.go:273] github.com/openshift/origin/pkg/user/cache/groups.go:51: forcing resync I0125 05:13:28.364229 4678 audit.go:125] 
2017-01-25T05:13:28.36416598-05:00 AUDIT: id="40b3d754-eca8-4438-8c4e-177a9b57a85e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:28.364771 4678 audit.go:45] 2017-01-25T05:13:28.364756437-05:00 AUDIT: id="40b3d754-eca8-4438-8c4e-177a9b57a85e" response="200" I0125 05:13:28.365155 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.273731ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:28.365642 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:13:29.068424 4678 audit.go:125] 2017-01-25T05:13:29.068372463-05:00 AUDIT: id="992a86b2-66c6-484a-9913-030b886b6aa5" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:29.069881 4678 audit.go:45] 2017-01-25T05:13:29.06986583-05:00 AUDIT: id="992a86b2-66c6-484a-9913-030b886b6aa5" response="200" I0125 05:13:29.069972 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.360452ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:29.224893 4678 audit.go:125] 2017-01-25T05:13:29.224855097-05:00 AUDIT: id="b514f624-0c7e-44b9-8c67-872c569835c0" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:13:29.225808 4678 audit.go:45] 2017-01-25T05:13:29.225797376-05:00 AUDIT: id="b514f624-0c7e-44b9-8c67-872c569835c0" response="200" I0125 05:13:29.225893 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (3.028547ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:29.226162 4678 controller.go:106] Found 0 cronjobs I0125 05:13:29.228008 4678 audit.go:125] 2017-01-25T05:13:29.22797555-05:00 AUDIT: id="3933359b-0549-498c-a6d8-84159f3af944" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:13:29.228820 4678 audit.go:45] 2017-01-25T05:13:29.228804255-05:00 AUDIT: id="3933359b-0549-498c-a6d8-84159f3af944" response="200" I0125 05:13:29.228887 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.48553ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:29.229109 4678 controller.go:114] Found 0 jobs I0125 05:13:29.229118 4678 controller.go:117] Found 0 groups I0125 05:13:29.261009 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:29.261037 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:29.261796 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:29.261823 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:29.261998 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42d6f09e0 -1 [] true false map[] 0xc4286cef00 } I0125 
05:13:29.262047 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:29.262432 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42d6f0ae0 -1 [] true false map[] 0xc42457d1d0 } I0125 05:13:29.262458 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:29.623811 4678 helpers.go:101] Unable to get network stats from pid 11219: couldn't read network stats: failure opening /proc/11219/net/dev: open /proc/11219/net/dev: no such file or directory I0125 05:13:29.684637 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:30.068493 4678 audit.go:125] 2017-01-25T05:13:30.068442896-05:00 AUDIT: id="08e3e10e-9e2e-40f8-bc70-f2f36eedecfc" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:30.069930 4678 audit.go:45] 2017-01-25T05:13:30.069917239-05:00 AUDIT: id="08e3e10e-9e2e-40f8-bc70-f2f36eedecfc" response="200" I0125 05:13:30.070012 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.423449ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:30.255552 4678 helpers.go:101] Unable to get network stats from pid 12340: couldn't read network stats: failure opening /proc/12340/net/dev: open /proc/12340/net/dev: no such file or directory I0125 05:13:31.068256 4678 audit.go:125] 2017-01-25T05:13:31.068184979-05:00 AUDIT: id="bae47fd0-9162-4c28-9f99-e5c52f3eb1dd" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:31.069660 4678 audit.go:45] 2017-01-25T05:13:31.069647456-05:00 AUDIT: id="bae47fd0-9162-4c28-9f99-e5c52f3eb1dd" response="200" I0125 05:13:31.069745 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.192905ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:31.684603 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:31.921131 4678 helpers.go:101] Unable to get network stats from pid 12449: couldn't read network stats: failure opening /proc/12449/net/dev: open /proc/12449/net/dev: no such file or directory I0125 05:13:31.936695 4678 audit.go:125] 2017-01-25T05:13:31.936657342-05:00 AUDIT: id="28f127fa-06d4-4f5f-90d2-c9f36f98a98e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:13:31.937788 4678 audit.go:45] 2017-01-25T05:13:31.937773506-05:00 AUDIT: id="28f127fa-06d4-4f5f-90d2-c9f36f98a98e" response="200" I0125 05:13:31.937875 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.425722ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] 
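The once-per-second GET .../pods?labelSelector=deployment%3Dpostgresql-slave-1 audit entries are the extended test polling for the slave deployment's pods (the %3D is simply a URL-encoded "="). Below is a minimal sketch of that poll, written against a current client-go rather than the 2017 vendored client the test binary actually uses, so the signatures differ from the original; the kubeconfig path is a placeholder, and only the namespace and label selector are copied from the log.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Placeholder kubeconfig; the real test authenticates as the per-test user
	// seen in the audit lines above.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	ns := "extended-test-postgresql-replication-1-34bbd-xd4g8"
	// Issues GET /api/v1/namespaces/<ns>/pods?labelSelector=deployment%3Dpostgresql-slave-1,
	// the same request recorded by audit.go above.
	pods, err := client.CoreV1().Pods(ns).List(context.TODO(), metav1.ListOptions{
		LabelSelector: "deployment=postgresql-slave-1",
	})
	if err != nil {
		panic(err)
	}
	for _, p := range pods.Items {
		fmt.Println(p.Name, p.Status.Phase)
	}
}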
I0125 05:13:32.029866 4678 helpers.go:101] Unable to get network stats from pid 11170: couldn't read network stats: failure opening /proc/11170/net/dev: open /proc/11170/net/dev: no such file or directory I0125 05:13:32.068506 4678 audit.go:125] 2017-01-25T05:13:32.068445757-05:00 AUDIT: id="9469acc5-8b93-4516-9465-eb28d812460b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:32.069938 4678 audit.go:45] 2017-01-25T05:13:32.069926469-05:00 AUDIT: id="9469acc5-8b93-4516-9465-eb28d812460b" response="200" I0125 05:13:32.070039 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.445002ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:32.250337 4678 reflector.go:273] github.com/openshift/origin/pkg/project/auth/cache.go:201: forcing resync I0125 05:13:32.498807 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:32.552471 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:13:32.552495 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:13:32.904449 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:13:32.904921 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:32.906257 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:32.954991 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:13:32.955031 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:32.967185 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:32.967223 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:32.967966 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:32.967982 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:32.968714 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:32 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42b63f720 0 [] true false map[] 0xc42c9d45a0 } I0125 05:13:32.968771 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:32.968877 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:32 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42b63f840 0 [] true false map[] 0xc4292fc5a0 } I0125 05:13:32.968914 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:33.068098 4678 audit.go:125] 2017-01-25T05:13:33.068060066-05:00 AUDIT: id="43370ef4-3a7f-46a0-94cc-39da1d21ef93" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:33.069424 4678 audit.go:45] 2017-01-25T05:13:33.06941314-05:00 AUDIT: id="43370ef4-3a7f-46a0-94cc-39da1d21ef93" response="200" I0125 05:13:33.069494 4678 panics.go:76] 
GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.946119ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:33.366651 4678 audit.go:125] 2017-01-25T05:13:33.366617647-05:00 AUDIT: id="b4f8c271-1cb3-4069-9e49-8b770381db63" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:33.367077 4678 audit.go:45] 2017-01-25T05:13:33.36706822-05:00 AUDIT: id="b4f8c271-1cb3-4069-9e49-8b770381db63" response="200" I0125 05:13:33.367431 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.028032ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:33.572492 4678 reflector.go:273] pkg/controller/endpoint/endpoints_controller.go:160: forcing resync I0125 05:13:33.572675 4678 endpoints_controller.go:334] Finished syncing service "default/kubernetes" endpoints. (2.792µs) I0125 05:13:33.577818 4678 audit.go:125] 2017-01-25T05:13:33.577767462-05:00 AUDIT: id="799326ea-1254-42f8-8564-1a2ab68d8748" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:13:33.578350 4678 audit.go:125] 2017-01-25T05:13:33.57832192-05:00 AUDIT: id="eb7d705d-2ba3-4f9d-9191-ec8f597e9787" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/router" I0125 05:13:33.579136 4678 audit.go:125] 2017-01-25T05:13:33.57910616-05:00 AUDIT: id="412642c0-ba37-469e-b24d-765eeeccc587" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/docker-registry" I0125 05:13:33.579497 4678 audit.go:125] 2017-01-25T05:13:33.579462265-05:00 AUDIT: id="f8393980-0b5c-40ec-994e-754969dfad57" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:13:33.579846 4678 audit.go:125] 2017-01-25T05:13:33.579809102-05:00 AUDIT: id="0c005189-db0d-46ea-85bd-cf08c56d07b4" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:13:33.580090 4678 audit.go:45] 2017-01-25T05:13:33.580075592-05:00 AUDIT: id="412642c0-ba37-469e-b24d-765eeeccc587" response="200" I0125 05:13:33.580179 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/docker-registry: (6.075185ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:33.580396 4678 audit.go:45] 2017-01-25T05:13:33.58038437-05:00 AUDIT: id="799326ea-1254-42f8-8564-1a2ab68d8748" response="200" I0125 05:13:33.580449 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (6.794392ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:33.580616 4678 audit.go:45] 2017-01-25T05:13:33.580604256-05:00 AUDIT: id="eb7d705d-2ba3-4f9d-9191-ec8f597e9787" response="200" I0125 05:13:33.580666 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/router: (5.627018ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:33.580987 4678 audit.go:45] 2017-01-25T05:13:33.580973507-05:00 AUDIT: id="0c005189-db0d-46ea-85bd-cf08c56d07b4" response="200" I0125 05:13:33.581035 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (5.472189ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:33.581110 4678 endpoints_controller.go:334] Finished syncing service "default/router" endpoints. (8.432016ms) I0125 05:13:33.581210 4678 endpoints_controller.go:334] Finished syncing service "default/docker-registry" endpoints. (8.535489ms) I0125 05:13:33.581213 4678 audit.go:45] 2017-01-25T05:13:33.58118647-05:00 AUDIT: id="f8393980-0b5c-40ec-994e-754969dfad57" response="200" I0125 05:13:33.581273 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (6.699273ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:33.581394 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (8.709708ms) I0125 05:13:33.581411 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:13:33.581697 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (9.053467ms) I0125 05:13:33.583400 4678 audit.go:125] 2017-01-25T05:13:33.58337525-05:00 AUDIT: id="8e1c1392-3452-4100-9d1c-cb2343c513b6" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:13:33.584983 4678 audit.go:45] 2017-01-25T05:13:33.584972692-05:00 AUDIT: id="8e1c1392-3452-4100-9d1c-cb2343c513b6" response="200" I0125 05:13:33.585025 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (3.199761ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:13:33.585318 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(12.612484ms) I0125 05:13:33.585522 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:13:33.585649 4678 proxier.go:804] Syncing iptables rules I0125 05:13:33.585660 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:33.595419 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:13:33.595508 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:13:33.595545 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:33.595554 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:33.595562 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:33.595572 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:33.595580 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master] I0125 05:13:33.595589 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:13:33.595602 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:13:33.605467 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:33.624049 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:33.642500 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:33.651556 4678 reflector.go:273] github.com/openshift/origin/pkg/service/controller/servingcert/secret_creating_controller.go:118: forcing resync I0125 05:13:33.651728 4678 secret_creating_controller.go:103] Updating service docker-registry I0125 05:13:33.651769 4678 secret_creating_controller.go:103] Updating service kubernetes I0125 05:13:33.651779 4678 secret_creating_controller.go:103] Updating service router I0125 05:13:33.651786 4678 secret_creating_controller.go:103] Updating service postgresql-master I0125 05:13:33.651794 4678 secret_creating_controller.go:103] Updating service postgresql-slave I0125 05:13:33.651807 4678 secret_creating_controller.go:103] Updating service postgresql-helper I0125 05:13:33.651857 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:352: forcing resync I0125 05:13:33.651884 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:210: forcing resync I0125 05:13:33.651908 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:90: forcing resync I0125 05:13:33.651920 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:207: forcing resync I0125 05:13:33.651935 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:304: forcing resync I0125 05:13:33.651990 4678 image_change_controller.go:37] 
Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/ruby I0125 05:13:33.652164 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/php I0125 05:13:33.652191 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/redis I0125 05:13:33.652225 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/perl I0125 05:13:33.652242 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/python I0125 05:13:33.652257 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/mysql I0125 05:13:33.652272 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/postgresql I0125 05:13:33.652286 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/mongodb I0125 05:13:33.652301 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/nodejs I0125 05:13:33.652316 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/wildfly I0125 05:13:33.652330 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/mariadb I0125 05:13:33.652344 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/jenkins I0125 05:13:33.661651 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:33.671472 4678 reflector.go:273] pkg/controller/podautoscaler/horizontal.go:133: forcing resync I0125 05:13:33.680348 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:33.689984 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:33.702570 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:33.722292 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:33.741482 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:33.756435 4678 reflector.go:273] pkg/controller/disruption/disruption.go:281: forcing resync I0125 05:13:33.756496 4678 reflector.go:273] pkg/controller/disruption/disruption.go:283: forcing resync I0125 05:13:33.756618 4678 reflector.go:273] pkg/controller/disruption/disruption.go:284: forcing resync I0125 05:13:33.761398 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] 
:KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j 
KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:33.761424 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:33.781888 4678 reflector.go:273] github.com/openshift/origin/pkg/project/controller/factory.go:36: forcing resync I0125 05:13:33.781888 4678 proxier.go:797] syncProxyRules took 196.233074ms I0125 05:13:33.781920 4678 proxier.go:566] OnEndpointsUpdate took 196.334547ms for 6 endpoints I0125 05:13:33.781981 4678 proxier.go:381] Received update notice: [] I0125 05:13:33.782024 4678 proxier.go:804] Syncing iptables rules I0125 05:13:33.782037 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:33.800607 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:33.812056 4678 reflector.go:273] pkg/controller/disruption/disruption.go:285: forcing resync I0125 05:13:33.819185 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:33.838532 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:33.851199 4678 reflector.go:273] pkg/controller/petset/pet_set.go:148: forcing resync I0125 05:13:33.857193 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:33.876373 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:33.895618 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules 
-j KUBE-POSTROUTING] I0125 05:13:33.915563 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:33.935369 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:33.956072 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment 
default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:33.956105 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:33.976785 4678 proxier.go:797] syncProxyRules took 194.754142ms I0125 05:13:33.976818 4678 proxier.go:431] OnServiceUpdate took 194.822291ms for 4 services I0125 05:13:33.988685 4678 reflector.go:273] pkg/controller/informers/factory.go:89: forcing resync I0125 05:13:34.008224 4678 gc_controller.go:175] GC'ing orphaned I0125 05:13:34.008244 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. 
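The iptables.go and proxier.go entries above are kube-proxy's sync pass: ensure the KUBE-* chains exist (iptables -N / -C), snapshot the current tables with iptables-save, then apply the complete desired rule set in one shot with iptables-restore --noflush --counters. The sketch below reproduces only that flow, keeping just the postgresql-helper service rules trimmed from the restore payload in the log; chain names and addresses are copied from it, the comment text is shortened, and actually running it requires root and would rewrite the host's KUBE-* chains.

package main

import (
	"os/exec"
	"strings"
)

func main() {
	// kube-proxy first snapshots the nat table; the output tells it which
	// KUBE-* chains already exist and what their counters are.
	if out, err := exec.Command("iptables-save", "-t", "nat").Output(); err == nil {
		_ = out
	}

	// Desired state, applied in one atomic restore. Every chain kube-proxy owns
	// is declared (":NAME - [0:0]") so its contents are replaced, while
	// --noflush leaves the rest of the table untouched.
	rules := strings.Join([]string{
		"*nat",
		":KUBE-SERVICES - [0:0]",
		":KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0]",
		":KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0]",
		`-A KUBE-SERVICES -m comment --comment "postgresql-helper cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX`,
		"-A KUBE-SVC-T2TLQTY2NRIUTPUX -j KUBE-SEP-5EBQIEXSJBX7BRLN",
		"-A KUBE-SEP-5EBQIEXSJBX7BRLN -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432",
		"COMMIT",
		"",
	}, "\n")

	cmd := exec.Command("iptables-restore", "--noflush", "--counters")
	cmd.Stdin = strings.NewReader(rules)
	if err := cmd.Run(); err != nil {
		panic(err)
	}
}

The net effect of these trimmed rules is that traffic to the service cluster IP 172.30.122.147:5432 is DNAT'ed to the helper pod at 172.17.0.8:5432, the same backend the TCP liveness probe above targets directly.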
I0125 05:13:34.030730 4678 helpers.go:101] Unable to get network stats from pid 10368: couldn't read network stats: failure opening /proc/10368/net/dev: open /proc/10368/net/dev: no such file or directory I0125 05:13:34.068545 4678 audit.go:125] 2017-01-25T05:13:34.068500487-05:00 AUDIT: id="b8562886-499b-4de4-9d9c-3b29b2381f16" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:34.070064 4678 audit.go:45] 2017-01-25T05:13:34.070051114-05:00 AUDIT: id="b8562886-499b-4de4-9d9c-3b29b2381f16" response="200" I0125 05:13:34.070149 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.546324ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:34.132436 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:13:34.132520 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:13:34.132541 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:34.132571 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:34.132600 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:13:34.132606 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:34.132615 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:34.132679 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:34.132689 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:13:34.132694 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:13:34.132698 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:34.132740 4678 pv_controller.go:823] updating 
PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:34.132745 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:13:34.132777 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:13:34.132782 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:34.132794 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:34.132803 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:34.133028 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:13:34.133039 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:13:34.133079 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:13:34.133098 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:34.133106 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:13:34.133117 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:34.133128 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:13:34.133132 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:13:34.133136 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:13:35.068182 4678 audit.go:125] 2017-01-25T05:13:35.068132793-05:00 AUDIT: id="92551e07-4202-4ab5-9c12-5459bf081a9d" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:35.069544 4678 audit.go:45] 2017-01-25T05:13:35.069532393-05:00 AUDIT: id="92551e07-4202-4ab5-9c12-5459bf081a9d" response="200" I0125 05:13:35.069623 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.085407ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:35.272848 4678 audit.go:125] 2017-01-25T05:13:35.272810997-05:00 AUDIT: id="6d4c21da-faa7-4fad-95de-01e0c95198ac" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:13:35.273854 4678 audit.go:45] 2017-01-25T05:13:35.273842818-05:00 AUDIT: id="6d4c21da-faa7-4fad-95de-01e0c95198ac" response="200" I0125 05:13:35.273928 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.347671ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:35.684629 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:36.068311 4678 audit.go:125] 2017-01-25T05:13:36.068268059-05:00 AUDIT: id="99f377a5-1f1c-46dc-93ef-d528de058756" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:36.069683 4678 audit.go:45] 2017-01-25T05:13:36.069672173-05:00 AUDIT: id="99f377a5-1f1c-46dc-93ef-d528de058756" response="200" I0125 05:13:36.069760 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.217109ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:36.594236 4678 panics.go:76] GET /oapi/v1/watch/builds?resourceVersion=9885&timeoutSeconds=578: (9m38.002714181s) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783 system:serviceaccount:openshift-infra:build-controller] 172.18.7.222:50846] I0125 05:13:36.594505 4678 reflector.go:392] github.com/openshift/origin/pkg/build/controller/factory/factory.go:145: Watch close - *api.Build total 14 items received I0125 05:13:36.596822 4678 audit.go:125] 2017-01-25T05:13:36.596791143-05:00 AUDIT: id="833a9f17-4867-46be-8fa7-67b03291c5b3" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:build-controller" as="" asgroups="" namespace="" uri="/oapi/v1/watch/builds?resourceVersion=10734&timeoutSeconds=364" I0125 05:13:36.597261 4678 audit.go:45] 2017-01-25T05:13:36.597247762-05:00 AUDIT: id="833a9f17-4867-46be-8fa7-67b03291c5b3" response="200" I0125 05:13:36.796980 4678 worker.go:45] 0 Health Check Listeners I0125 05:13:36.797005 4678 worker.go:46] 4 Services registered for health checking I0125 05:13:36.797011 4678 worker.go:50] Service default/docker-registry has 1 local endpoints I0125 05:13:36.797016 4678 worker.go:50] Service default/router has 1 local endpoints I0125 05:13:36.797021 4678 worker.go:50] Service extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper has 1 local endpoints I0125 05:13:36.797026 4678 worker.go:50] Service extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master has 1 local endpoints I0125 
05:13:37.068243 4678 audit.go:125] 2017-01-25T05:13:37.068178338-05:00 AUDIT: id="2962bdbe-2638-46ab-ad77-1798d41a2b9b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:37.069614 4678 audit.go:45] 2017-01-25T05:13:37.069602675-05:00 AUDIT: id="2962bdbe-2638-46ab-ad77-1798d41a2b9b" response="200" I0125 05:13:37.069706 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.16454ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:37.072627 4678 proxier.go:804] Syncing iptables rules I0125 05:13:37.072641 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:13:37.091996 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:13:37.110609 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:37.129307 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:37.154938 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:13:37.167414 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:13:37.167472 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:13:37.184942 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:13:37.232821 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:13:37.258535 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:13:37.288699 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:13:37.323856 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A 
KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 
172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:13:37.324651 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:13:37.387350 4678 proxier.go:797] syncProxyRules took 314.720186ms I0125 05:13:37.387401 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-CONTAINER -t nat] I0125 05:13:37.422417 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-CONTAINER] I0125 05:13:37.488423 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-HOST -t nat] I0125 05:13:37.523527 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-HOST] I0125 05:13:37.588292 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-CONTAINER -t nat] I0125 05:13:37.603125 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-master-2-deploy I0125 05:13:37.603214 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-slave-1-deploy I0125 05:13:37.603306 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-master-1-6jfgj I0125 05:13:37.603374 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:13:37.614162 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-CONTAINER] I0125 05:13:37.633769 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-HOST -t nat] I0125 05:13:37.652276 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-HOST] I0125 05:13:37.670976 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-NON-LOCAL -t filter] I0125 05:13:37.684646 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:37.692810 4678 iptables.go:362] running iptables -C [INPUT -t filter -m comment --comment 
Ensure that non-local NodePort traffic can flow -j KUBE-NODEPORT-NON-LOCAL] I0125 05:13:37.789497 4678 audit.go:125] 2017-01-25T05:13:37.789457068-05:00 AUDIT: id="865a11f0-175e-4fc4-b39e-4bd6ade0e314" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:13:37.789934 4678 audit.go:45] 2017-01-25T05:13:37.789920827-05:00 AUDIT: id="865a11f0-175e-4fc4-b39e-4bd6ade0e314" response="200" I0125 05:13:37.790303 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.068011ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:37.864695 4678 audit.go:125] 2017-01-25T05:13:37.864652202-05:00 AUDIT: id="9b31f99b-6c91-4c09-b195-d6bbec586bc6" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:13:37.869056 4678 audit.go:45] 2017-01-25T05:13:37.869018902-05:00 AUDIT: id="9b31f99b-6c91-4c09-b195-d6bbec586bc6" response="200" I0125 05:13:37.869504 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (5.130237ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:37.871452 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:13:38.068466 4678 audit.go:125] 2017-01-25T05:13:38.068425186-05:00 AUDIT: id="2ec0ace0-50ab-4972-8277-f717e1668302" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:38.069953 4678 audit.go:45] 2017-01-25T05:13:38.069935956-05:00 AUDIT: id="2ec0ace0-50ab-4972-8277-f717e1668302" response="200" I0125 05:13:38.070037 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.44554ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:38.145543 4678 factory.go:104] Error trying to work out if we can handle /system.slice/dev-hugepages.mount: invalid container name I0125 05:13:38.145565 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/dev-hugepages.mount" I0125 05:13:38.145574 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/dev-hugepages.mount", but ignoring. 
I0125 05:13:38.145582 4678 manager.go:867] ignoring container "/system.slice/dev-hugepages.mount" I0125 05:13:38.145609 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-daedc0da\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:13:38.145615 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:13:38.145625 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. I0125 05:13:38.145635 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:13:38.145665 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b76687cc\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:13:38.145668 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:13:38.145676 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. I0125 05:13:38.145685 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:13:38.145703 4678 factory.go:104] Error trying to work out if we can handle /system.slice/sys-kernel-config.mount: invalid container name I0125 05:13:38.145706 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/sys-kernel-config.mount" I0125 05:13:38.145710 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/sys-kernel-config.mount", but ignoring. I0125 05:13:38.145716 4678 manager.go:867] ignoring container "/system.slice/sys-kernel-config.mount" I0125 05:13:38.145722 4678 factory.go:104] Error trying to work out if we can handle /system.slice/dev-mqueue.mount: invalid container name I0125 05:13:38.145725 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/dev-mqueue.mount" I0125 05:13:38.145729 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/dev-mqueue.mount", but ignoring. 
I0125 05:13:38.145735 4678 manager.go:867] ignoring container "/system.slice/dev-mqueue.mount" I0125 05:13:38.145760 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-04c98b55\x2de2da\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-router\x2dtoken\x2ds79l8.mount: invalid container name I0125 05:13:38.145766 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount" I0125 05:13:38.145775 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount", but ignoring. I0125 05:13:38.145784 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount" I0125 05:13:38.145794 4678 factory.go:104] Error trying to work out if we can handle /system.slice/sys-kernel-debug.mount: invalid container name I0125 05:13:38.145797 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/sys-kernel-debug.mount" I0125 05:13:38.145801 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/sys-kernel-debug.mount", but ignoring. I0125 05:13:38.145806 4678 manager.go:867] ignoring container "/system.slice/sys-kernel-debug.mount" I0125 05:13:38.145816 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir.mount: invalid container name I0125 05:13:38.145819 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount" I0125 05:13:38.145823 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount", but ignoring. I0125 05:13:38.145829 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount" I0125 05:13:38.145854 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-e932e61a\x2de2d9\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-registry\x2dtoken\x2dvjbst.mount: invalid container name I0125 05:13:38.145857 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount" I0125 05:13:38.145864 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount", but ignoring. 
I0125 05:13:38.145876 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount" I0125 05:13:38.145905 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-04c98b55\x2de2da\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-server\x2dcertificate.mount: invalid container name I0125 05:13:38.145908 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount" I0125 05:13:38.145916 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount", but ignoring. I0125 05:13:38.145924 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount" I0125 05:13:38.145937 4678 factory.go:104] Error trying to work out if we can handle /system.slice/run-user-1000.mount: invalid container name I0125 05:13:38.145940 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/run-user-1000.mount" I0125 05:13:38.145944 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/run-user-1000.mount", but ignoring. I0125 05:13:38.145949 4678 manager.go:867] ignoring container "/system.slice/run-user-1000.mount" I0125 05:13:38.145954 4678 factory.go:104] Error trying to work out if we can handle /system.slice/-.mount: invalid container name I0125 05:13:38.145957 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/-.mount" I0125 05:13:38.145961 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/-.mount", but ignoring. 
I0125 05:13:38.145966 4678 manager.go:867] ignoring container "/system.slice/-.mount" I0125 05:13:38.145979 4678 manager.go:955] Destroyed container: "/system.slice/docker-bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741.scope" (aliases: [k8s_POD.73b4fecf_postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8_b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094_ec0e86ef bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741], namespace: "docker") I0125 05:13:38.145996 4678 handler.go:325] Added event &{/system.slice/docker-bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741.scope 2017-01-25 05:13:38.145989834 -0500 EST containerDeletion {}} I0125 05:13:38.146018 4678 manager.go:955] Destroyed container: "/system.slice/docker-0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f.scope" (aliases: [k8s_postgresql-master.e7ea033_postgresql-master-1-6jfgj_extended-test-postgresql-replication-1-34bbd-xd4g8_b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094_88f8e310 0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f], namespace: "docker") I0125 05:13:38.146028 4678 handler.go:325] Added event &{/system.slice/docker-0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f.scope 2017-01-25 05:13:38.146026155 -0500 EST containerDeletion {}} I0125 05:13:38.146037 4678 manager.go:955] Destroyed container: "/system.slice/docker-1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48.scope" (aliases: [k8s_postgresql-slave.db39a3b3_postgresql-slave-1-qt1rc_extended-test-postgresql-replication-1-34bbd-xd4g8_b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094_089e472a 1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48], namespace: "docker") I0125 05:13:38.146044 4678 handler.go:325] Added event &{/system.slice/docker-1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48.scope 2017-01-25 05:13:38.14604262 -0500 EST containerDeletion {}} I0125 05:13:38.146055 4678 manager.go:955] Destroyed container: "/system.slice/docker-a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f.scope" (aliases: [k8s_deployment.7770d39a_postgresql-master-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b3924f08-e2e6-11e6-a4b0-0e6a5cbf0094_04ad5900 a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f], namespace: "docker") I0125 05:13:38.146063 4678 handler.go:325] Added event &{/system.slice/docker-a14e6dfd9e5b137072e3f046c31ac67344b34cf0cd53f764c9c2b1814345c70f.scope 2017-01-25 05:13:38.146061175 -0500 EST containerDeletion {}} I0125 05:13:38.146071 4678 manager.go:955] Destroyed container: "/system.slice/docker-764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f.scope" (aliases: [k8s_POD.f321dce3_postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094_882bf590 764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f], namespace: "docker") I0125 05:13:38.146078 4678 handler.go:325] Added event &{/system.slice/docker-764297878a87c434af8227bd2bedbb2924a01760b7885779eb1e59021ca09d1f.scope 2017-01-25 05:13:38.146076522 -0500 EST containerDeletion {}} I0125 05:13:38.146086 4678 manager.go:955] Destroyed container: "/system.slice/docker-269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d.scope" (aliases: [k8s_deployment.46f5d329_postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094_2f5e0764 269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d], namespace: 
"docker") I0125 05:13:38.146094 4678 handler.go:325] Added event &{/system.slice/docker-269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d.scope 2017-01-25 05:13:38.146092501 -0500 EST containerDeletion {}} I0125 05:13:38.146102 4678 manager.go:955] Destroyed container: "/system.slice/docker-3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28.scope" (aliases: [k8s_deployment.7bb3d39b_postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094_08cf920c 3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28], namespace: "docker") I0125 05:13:38.146109 4678 handler.go:325] Added event &{/system.slice/docker-3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28.scope 2017-01-25 05:13:38.146107791 -0500 EST containerDeletion {}} I0125 05:13:38.146120 4678 manager.go:955] Destroyed container: "/system.slice/docker-2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d.scope" (aliases: [k8s_POD.f321dce3_postgresql-master-2-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8_d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094_f5d33a89 2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d], namespace: "docker") I0125 05:13:38.146127 4678 handler.go:325] Added event &{/system.slice/docker-2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d.scope 2017-01-25 05:13:38.146125931 -0500 EST containerDeletion {}} I0125 05:13:38.368711 4678 audit.go:125] 2017-01-25T05:13:38.368676826-05:00 AUDIT: id="cc6229ba-9481-4fa9-9b5a-debaf94ff0ab" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:38.369106 4678 audit.go:45] 2017-01-25T05:13:38.369096596-05:00 AUDIT: id="cc6229ba-9481-4fa9-9b5a-debaf94ff0ab" response="200" I0125 05:13:38.369453 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.01436ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:38.369718 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
I0125 05:13:39.068114 4678 audit.go:125] 2017-01-25T05:13:39.068071675-05:00 AUDIT: id="51c95285-dbb3-4482-a315-9b1ad3a8a512" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:39.069449 4678 audit.go:45] 2017-01-25T05:13:39.069437775-05:00 AUDIT: id="51c95285-dbb3-4482-a315-9b1ad3a8a512" response="200" I0125 05:13:39.069518 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.992587ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:39.231808 4678 audit.go:125] 2017-01-25T05:13:39.231766651-05:00 AUDIT: id="4f215f4e-90cd-4e39-9c31-61c87d190578" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:13:39.232634 4678 audit.go:45] 2017-01-25T05:13:39.232621036-05:00 AUDIT: id="4f215f4e-90cd-4e39-9c31-61c87d190578" response="200" I0125 05:13:39.232709 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.829224ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:39.232960 4678 controller.go:106] Found 0 cronjobs I0125 05:13:39.234647 4678 audit.go:125] 2017-01-25T05:13:39.234624136-05:00 AUDIT: id="4533a6ed-1301-4b9c-b74c-b8f1dc25c168" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:13:39.235445 4678 audit.go:45] 2017-01-25T05:13:39.235434815-05:00 AUDIT: id="4533a6ed-1301-4b9c-b74c-b8f1dc25c168" response="200" I0125 05:13:39.235509 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.314261ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:39.235727 4678 controller.go:114] Found 0 jobs I0125 05:13:39.235734 4678 controller.go:117] Found 0 groups I0125 05:13:39.261031 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:39.261061 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:39.261707 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:39.261724 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:39.262235 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc43314a620 -1 [] true false map[] 0xc4321b64b0 } I0125 05:13:39.262284 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:39.262357 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc4368ba9e0 -1 [] true false map[] 0xc42ee55c20 } I0125 05:13:39.262393 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:39.684612 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:40.068162 4678 audit.go:125] 
2017-01-25T05:13:40.06812115-05:00 AUDIT: id="b75e24a6-cd55-4efd-b0e4-15327bd9a968" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:40.069510 4678 audit.go:45] 2017-01-25T05:13:40.069498996-05:00 AUDIT: id="b75e24a6-cd55-4efd-b0e4-15327bd9a968" response="200" I0125 05:13:40.069591 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.070561ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:41.068126 4678 audit.go:125] 2017-01-25T05:13:41.068086863-05:00 AUDIT: id="8e7a1887-8f33-4609-a31f-22933e027a5b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:41.069455 4678 audit.go:45] 2017-01-25T05:13:41.069443108-05:00 AUDIT: id="8e7a1887-8f33-4609-a31f-22933e027a5b" response="200" I0125 05:13:41.069525 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.006084ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:41.684611 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:41.939063 4678 audit.go:125] 2017-01-25T05:13:41.939027684-05:00 AUDIT: id="b9fe5c45-dbaa-49cb-a483-fb606a4063c6" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:13:41.940144 4678 audit.go:45] 2017-01-25T05:13:41.940132176-05:00 AUDIT: id="b9fe5c45-dbaa-49cb-a483-fb606a4063c6" response="200" I0125 05:13:41.940239 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.421353ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:42.068099 4678 audit.go:125] 2017-01-25T05:13:42.068060962-05:00 AUDIT: id="63edc506-2a54-41a2-a2f3-81de4ce5f92a" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:42.069414 4678 audit.go:45] 2017-01-25T05:13:42.069398624-05:00 AUDIT: id="63edc506-2a54-41a2-a2f3-81de4ce5f92a" response="200" I0125 05:13:42.069488 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.962636ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:42.485873 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:13:42.486348 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:13:42.498898 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: 
{postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:42.548733 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:13:42.548757 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:13:42.904451 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:13:42.904928 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:42.905422 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:42.956117 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:13:42.956143 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:42.967155 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:42.967181 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:42.967803 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:42.967826 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:42.968236 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:42 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc4347e08a0 0 [] true false map[] 0xc42b069770 } I0125 05:13:42.968286 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:42.968380 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:42 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc4347e09c0 0 [] true false map[] 0xc42b069a40 } I0125 05:13:42.968411 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:43.068103 4678 audit.go:125] 2017-01-25T05:13:43.068055416-05:00 AUDIT: id="52d46205-bdd5-462b-a155-acc59332a245" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:43.069456 4678 audit.go:45] 2017-01-25T05:13:43.06944288-05:00 AUDIT: id="52d46205-bdd5-462b-a155-acc59332a245" response="200" I0125 05:13:43.069526 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.034551ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:43.370560 4678 audit.go:125] 2017-01-25T05:13:43.370528585-05:00 AUDIT: id="d1c0852c-fa98-4f55-91f6-fc982fd0646d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:43.370975 4678 audit.go:45] 2017-01-25T05:13:43.370962263-05:00 AUDIT: id="d1c0852c-fa98-4f55-91f6-fc982fd0646d" response="200" I0125 05:13:43.371309 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (980.441µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:43.684619 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094) I0125 05:13:43.684668 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:43.684767 4678 kubelet_pods.go:1029] Generating status for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:43.684965 4678 status_manager.go:312] Ignoring same status for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:09 -0500 EST Reason: Message:} 
{Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:29 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:09 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.18.7.222 StartTime:2017-01-25 03:41:09 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:router State:{Waiting: Running:0xc434434780 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-haproxy-router:86a9783 ImageID:docker://sha256:0e944dc1f6ca904b8892fd8e5da5ec5cf13c0f673b44380cc81c1fdbc53b379e ContainerID:docker://38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018}]} I0125 05:13:43.685092 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:43.702768 4678 secret.go:179] Setting up volume router-token-s79l8 for pod 04c98b55-e2da-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/router-token-s79l8 I0125 05:13:43.702768 4678 secret.go:179] Setting up volume server-certificate for pod 04c98b55-e2da-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/server-certificate I0125 05:13:43.703554 4678 audit.go:125] 2017-01-25T05:13:43.703525233-05:00 AUDIT: id="11c90db0-223d-4f24-a5a0-7b4e9bbe3e79" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-token-s79l8" I0125 05:13:43.704161 4678 audit.go:125] 2017-01-25T05:13:43.704141342-05:00 AUDIT: id="a9c7e967-dded-42ca-ac46-30b0de8174d8" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-certs" I0125 05:13:43.705216 4678 audit.go:45] 2017-01-25T05:13:43.705186949-05:00 AUDIT: id="a9c7e967-dded-42ca-ac46-30b0de8174d8" response="200" I0125 05:13:43.705411 4678 audit.go:45] 2017-01-25T05:13:43.705402714-05:00 AUDIT: id="11c90db0-223d-4f24-a5a0-7b4e9bbe3e79" response="200" I0125 05:13:43.705582 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-certs: (1.577746ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:43.705620 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-token-s79l8: (2.282656ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:43.705898 4678 secret.go:206] Received secret default/router-token-s79l8 containing (4) pieces of data, 4105 total bytes I0125 05:13:43.706050 4678 secret.go:206] Received secret default/router-certs containing (2) pieces of data, 6633 total bytes I0125 05:13:43.706277 4678 atomic_writer.go:142] pod default/router-2-tnqzg volume server-certificate: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/server-certificate I0125 05:13:43.706310 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094-server-certificate" (spec.Name: "server-certificate") pod "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094" (UID: "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:13:43.706391 4678 atomic_writer.go:142] pod default/router-2-tnqzg volume router-token-s79l8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/router-token-s79l8 I0125 05:13:43.706400 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094-router-token-s79l8" (spec.Name: "router-token-s79l8") pod "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094" (UID: "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094"). I0125 05:13:43.985359 4678 volume_manager.go:365] All volumes are attached and mounted for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:43.986001 4678 audit.go:125] 2017-01-25T05:13:43.98596626-05:00 AUDIT: id="036387ef-ecae-49a3-848f-5315c5b769d9" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-dockercfg-g5x9s" I0125 05:13:43.987115 4678 audit.go:45] 2017-01-25T05:13:43.987104101-05:00 AUDIT: id="036387ef-ecae-49a3-848f-5315c5b769d9" response="200" I0125 05:13:43.987306 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-dockercfg-g5x9s: (1.554727ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:43.987492 4678 docker_manager.go:1938] Found pod infra container for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:43.989409 4678 docker_manager.go:1951] Pod infra container looks good, keep it "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:13:43.989423 4678 docker_manager.go:1999] pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" container "router" exists as 38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018 I0125 05:13:43.989569 4678 docker_manager.go:2086] Got container changes for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:188d4e0914dd28bdf7d2fcdd075d10d5db9c14ede72c6c0417fe6eaf68450985 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[188d4e0914dd28bdf7d2fcdd075d10d5db9c14ede72c6c0417fe6eaf68450985:-1 38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018:0]} I0125 05:13:44.068037 4678 audit.go:125] 2017-01-25T05:13:44.067999612-05:00 AUDIT: id="a59d7afd-bb35-4ce0-9e55-8a14fbe7fe19" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:44.069393 4678 audit.go:45] 2017-01-25T05:13:44.069382547-05:00 AUDIT: id="a59d7afd-bb35-4ce0-9e55-8a14fbe7fe19" response="200" I0125 05:13:44.069464 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.944439ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:45.068115 4678 audit.go:125] 2017-01-25T05:13:45.068076475-05:00 AUDIT: id="993c9187-d5ad-457e-a1fa-d83a43e80425" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:45.069426 4678 audit.go:45] 2017-01-25T05:13:45.069413974-05:00 AUDIT: id="993c9187-d5ad-457e-a1fa-d83a43e80425" response="200" I0125 05:13:45.069498 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.982886ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:45.278814 4678 audit.go:125] 2017-01-25T05:13:45.278777297-05:00 AUDIT: id="8ec78184-901a-470a-9117-fdfe8279b51c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:13:45.279758 4678 audit.go:45] 2017-01-25T05:13:45.27974722-05:00 AUDIT: id="8ec78184-901a-470a-9117-fdfe8279b51c" response="200" I0125 05:13:45.279822 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.251156ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:45.684611 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:46.068111 4678 audit.go:125] 2017-01-25T05:13:46.068063646-05:00 AUDIT: id="3810e804-82a6-40be-8b9d-2b65ebe573d4" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:46.069480 4678 audit.go:45] 2017-01-25T05:13:46.069460809-05:00 AUDIT: id="3810e804-82a6-40be-8b9d-2b65ebe573d4" response="200" I0125 05:13:46.069551 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.021465ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:47.068163 4678 audit.go:125] 2017-01-25T05:13:47.068123215-05:00 AUDIT: id="4778bf44-9141-4979-a072-c210173be1e9" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:47.069428 4678 audit.go:45] 2017-01-25T05:13:47.069413578-05:00 AUDIT: id="4778bf44-9141-4979-a072-c210173be1e9" response="200" I0125 05:13:47.069502 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.985459ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:47.616626 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:13:47.616645 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:13:47.685054 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:47.727507 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:13:47.871175 4678 audit.go:125] 2017-01-25T05:13:47.871132754-05:00 AUDIT: id="4437b814-85e2-4a6b-b2ab-36309b332427" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:13:47.871590 4678 audit.go:45] 2017-01-25T05:13:47.871580548-05:00 AUDIT: id="4437b814-85e2-4a6b-b2ab-36309b332427" response="200" I0125 05:13:47.871887 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (960.308µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:47.940937 4678 audit.go:125] 2017-01-25T05:13:47.940899233-05:00 AUDIT: id="4b298063-bbd5-42ad-a2d7-e71a54ea1b4f" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:13:47.943051 4678 audit.go:45] 2017-01-25T05:13:47.943034993-05:00 AUDIT: id="4b298063-bbd5-42ad-a2d7-e71a54ea1b4f" response="200" I0125 05:13:47.943360 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.667048ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:47.944205 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:13:48.068024 4678 audit.go:125] 2017-01-25T05:13:48.067983216-05:00 AUDIT: id="ea13459f-42b0-44f5-b141-6d0edf71cf99" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:48.069335 4678 audit.go:45] 2017-01-25T05:13:48.069324453-05:00 AUDIT: id="ea13459f-42b0-44f5-b141-6d0edf71cf99" response="200" I0125 05:13:48.069410 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.886142ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:48.372506 4678 audit.go:125] 2017-01-25T05:13:48.372457242-05:00 AUDIT: id="70ce57f7-8a5a-40f6-8863-03a81fff80c1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:48.372912 4678 audit.go:45] 2017-01-25T05:13:48.372902657-05:00 AUDIT: id="70ce57f7-8a5a-40f6-8863-03a81fff80c1" response="200" I0125 05:13:48.373218 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (961.308µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:48.373443 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
I0125 05:13:49.068106 4678 audit.go:125] 2017-01-25T05:13:49.068065797-05:00 AUDIT: id="9301dd3e-d221-463c-8a8e-2a6caa8c7400" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:49.069371 4678 audit.go:45] 2017-01-25T05:13:49.069359397-05:00 AUDIT: id="9301dd3e-d221-463c-8a8e-2a6caa8c7400" response="200" I0125 05:13:49.069452 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.01663ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:49.132689 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:13:49.132775 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:13:49.132795 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:49.132816 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:49.132827 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:13:49.132846 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:49.132857 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:49.132949 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:49.132955 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:13:49.132960 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:13:49.132964 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:49.132998 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:13:49.133007 4678 pv_controller.go:546] updating 
PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:13:49.133044 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:13:49.133050 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:13:49.133062 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:49.133073 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:49.133213 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:13:49.133240 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:13:49.133294 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:13:49.133311 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:13:49.133316 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:13:49.133326 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:13:49.133337 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:13:49.133354 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:13:49.133363 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:13:49.238044 4678 audit.go:125] 2017-01-25T05:13:49.238014474-05:00 AUDIT: id="5aed259f-ad93-40cb-b4c9-af141698953a" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:13:49.238822 4678 audit.go:45] 2017-01-25T05:13:49.238811603-05:00 AUDIT: id="5aed259f-ad93-40cb-b4c9-af141698953a" response="200" I0125 05:13:49.238901 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.599979ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 
05:13:49.239156 4678 controller.go:106] Found 0 cronjobs I0125 05:13:49.240851 4678 audit.go:125] 2017-01-25T05:13:49.240831813-05:00 AUDIT: id="61156340-5e8e-4d46-a9b0-b8ca4283c8c0" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:13:49.241591 4678 audit.go:45] 2017-01-25T05:13:49.241581226-05:00 AUDIT: id="61156340-5e8e-4d46-a9b0-b8ca4283c8c0" response="200" I0125 05:13:49.241648 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.258328ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:49.241843 4678 controller.go:114] Found 0 jobs I0125 05:13:49.241852 4678 controller.go:117] Found 0 groups I0125 05:13:49.261006 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:49.261032 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:49.261583 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:49.261599 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:49.262161 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc43337a200 -1 [] true false map[] 0xc42ffcb770 } I0125 05:13:49.262229 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:49.262373 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc43337a2e0 -1 [] true false map[] 0xc434f3cc30 } I0125 05:13:49.262397 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:49.684608 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:50.068067 4678 audit.go:125] 2017-01-25T05:13:50.06802891-05:00 AUDIT: id="2ef7f335-d5ee-40a1-ad06-1663e441aa05" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:50.069357 4678 audit.go:45] 2017-01-25T05:13:50.069344904-05:00 AUDIT: id="2ef7f335-d5ee-40a1-ad06-1663e441aa05" response="200" I0125 05:13:50.069439 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.960673ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:51.068209 4678 audit.go:125] 2017-01-25T05:13:51.068159552-05:00 AUDIT: id="8db15181-c11f-4337-bf7d-9f0870c73c6b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:51.069497 4678 audit.go:45] 2017-01-25T05:13:51.069481765-05:00 AUDIT: id="8db15181-c11f-4337-bf7d-9f0870c73c6b" response="200" I0125 05:13:51.069569 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.979631ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:51.684613 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:51.941392 4678 audit.go:125] 2017-01-25T05:13:51.941354081-05:00 AUDIT: id="60860b82-3dda-463f-b618-456c0049cef6" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:13:51.942480 4678 audit.go:45] 2017-01-25T05:13:51.942469838-05:00 AUDIT: id="60860b82-3dda-463f-b618-456c0049cef6" response="200" I0125 05:13:51.942566 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.445381ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:52.068137 4678 audit.go:125] 2017-01-25T05:13:52.068097617-05:00 AUDIT: id="0c54dbdf-7335-4bb8-9083-8450e50cc7b6" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:52.069433 4678 audit.go:45] 2017-01-25T05:13:52.06941397-05:00 AUDIT: id="0c54dbdf-7335-4bb8-9083-8450e50cc7b6" response="200" I0125 05:13:52.069504 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.931533ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:52.485842 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:13:52.486265 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:13:52.498896 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:52.550184 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:13:52.550207 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:13:52.904440 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:13:52.904860 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:52.905425 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:13:52.957003 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:13:52.957025 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:13:52.967153 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:52.967183 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:52.968193 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:13:52.968211 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:52.968448 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:52 GMT]] 0xc43480cea0 0 [] true false map[] 0xc4360e7d10 } I0125 05:13:52.968509 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:52.969183 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:13:52 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc431c9d720 0 [] true false map[] 0xc42e1d0d20 } I0125 05:13:52.969235 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:13:53.067997 4678 audit.go:125] 2017-01-25T05:13:53.067960592-05:00 AUDIT: id="b904be9b-4dfe-409d-a7b8-c1b77c1e899c" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:53.069361 4678 audit.go:45] 2017-01-25T05:13:53.069349899-05:00 AUDIT: id="b904be9b-4dfe-409d-a7b8-c1b77c1e899c" response="200" I0125 05:13:53.069430 4678 panics.go:76] 
GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.892686ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:53.374350 4678 audit.go:125] 2017-01-25T05:13:53.37431374-05:00 AUDIT: id="ec910582-f795-4af6-a335-ce15d594d7a1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:53.374761 4678 audit.go:45] 2017-01-25T05:13:53.374752487-05:00 AUDIT: id="ec910582-f795-4af6-a335-ce15d594d7a1" response="200" I0125 05:13:53.375074 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (973.486µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:53.684604 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:54.008475 4678 gc_controller.go:175] GC'ing orphaned I0125 05:13:54.008498 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. I0125 05:13:54.068223 4678 audit.go:125] 2017-01-25T05:13:54.068169413-05:00 AUDIT: id="b64dc476-9caa-4851-a409-9d00935e159e" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:54.069543 4678 audit.go:45] 2017-01-25T05:13:54.069531593-05:00 AUDIT: id="b64dc476-9caa-4851-a409-9d00935e159e" response="200" I0125 05:13:54.069614 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.018643ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:54.136001 4678 panics.go:76] GET /api/v1/watch/services?resourceVersion=10097&timeoutSeconds=434: (7m14.000893075s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:54.136233 4678 reflector.go:392] github.com/openshift/origin/pkg/dns/serviceaccessor.go:45: Watch close - *api.Service total 11 items received I0125 05:13:54.136856 4678 audit.go:125] 2017-01-25T05:13:54.136827196-05:00 AUDIT: id="9502036f-a9aa-4af3-b180-c1e3c2172572" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/services?resourceVersion=10976&timeoutSeconds=419" I0125 05:13:54.137321 4678 audit.go:45] 2017-01-25T05:13:54.137308346-05:00 AUDIT: id="9502036f-a9aa-4af3-b180-c1e3c2172572" response="200" I0125 05:13:55.068160 4678 audit.go:125] 2017-01-25T05:13:55.068104534-05:00 AUDIT: id="38aca656-4b68-4445-86fd-d4bb6defd97b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:55.069449 4678 audit.go:45] 2017-01-25T05:13:55.069433521-05:00 AUDIT: id="38aca656-4b68-4445-86fd-d4bb6defd97b" response="200" I0125 05:13:55.069525 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.947798ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:55.284490 4678 audit.go:125] 
2017-01-25T05:13:55.284459266-05:00 AUDIT: id="a5e2a1c2-49bb-465b-8653-8fbc0041b9b5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:13:55.285441 4678 audit.go:45] 2017-01-25T05:13:55.285429993-05:00 AUDIT: id="a5e2a1c2-49bb-465b-8653-8fbc0041b9b5" response="200" I0125 05:13:55.285501 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.239318ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:55.684641 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:56.068220 4678 audit.go:125] 2017-01-25T05:13:56.06815519-05:00 AUDIT: id="f6977cab-7325-4478-a57c-3a22621ff74e" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:56.069553 4678 audit.go:45] 2017-01-25T05:13:56.069542497-05:00 AUDIT: id="f6977cab-7325-4478-a57c-3a22621ff74e" response="200" I0125 05:13:56.069640 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.104789ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:57.068089 4678 audit.go:125] 2017-01-25T05:13:57.068046498-05:00 AUDIT: id="2dee92e4-7471-4695-b0c9-22a98538091f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:57.069433 4678 audit.go:45] 2017-01-25T05:13:57.069418005-05:00 AUDIT: id="2dee92e4-7471-4695-b0c9-22a98538091f" response="200" I0125 05:13:57.069504 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.95318ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:57.684592 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:57.756871 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:13:57.756889 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:13:57.851111 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:13:57.944752 4678 audit.go:125] 2017-01-25T05:13:57.944717673-05:00 AUDIT: id="c68d578e-6ed7-41dc-904a-371b3871c8e0" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:13:57.945126 4678 audit.go:45] 2017-01-25T05:13:57.94511713-05:00 AUDIT: id="c68d578e-6ed7-41dc-904a-371b3871c8e0" response="200" I0125 05:13:57.945449 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (915.892µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:58.012038 4678 audit.go:125] 2017-01-25T05:13:58.0120066-05:00 AUDIT: id="1e45a30c-2bf6-4809-bfaa-a0edefe03d76" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:13:58.014161 4678 audit.go:45] 2017-01-25T05:13:58.014145859-05:00 AUDIT: id="1e45a30c-2bf6-4809-bfaa-a0edefe03d76" response="200" I0125 05:13:58.014404 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.591471ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:13:58.015261 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:13:58.068197 4678 audit.go:125] 2017-01-25T05:13:58.068150452-05:00 AUDIT: id="f3f35ad8-9130-41da-9131-8eccd69e3b4e" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:58.069495 4678 audit.go:45] 2017-01-25T05:13:58.069477933-05:00 AUDIT: id="f3f35ad8-9130-41da-9131-8eccd69e3b4e" response="200" I0125 05:13:58.069570 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.042147ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:58.376171 4678 audit.go:125] 2017-01-25T05:13:58.376136255-05:00 AUDIT: id="425fa71d-802b-41b5-99df-bbbbbcd4ff22" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:13:58.376623 4678 audit.go:45] 2017-01-25T05:13:58.376610775-05:00 AUDIT: id="425fa71d-802b-41b5-99df-bbbbbcd4ff22" response="200" I0125 05:13:58.376930 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (992.939µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:13:58.377210 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
I0125 05:13:58.710239 4678 container_gc.go:249] Removing container "2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d" name "POD" I0125 05:13:59.054610 4678 container_gc.go:249] Removing container "0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f" name "postgresql-master" I0125 05:13:59.068146 4678 audit.go:125] 2017-01-25T05:13:59.068098962-05:00 AUDIT: id="76ee747f-a5e9-4062-b6ab-48b9765ddcc7" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:13:59.069468 4678 audit.go:45] 2017-01-25T05:13:59.069455806-05:00 AUDIT: id="76ee747f-a5e9-4062-b6ab-48b9765ddcc7" response="200" I0125 05:13:59.069542 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.013889ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:13:59.244638 4678 audit.go:125] 2017-01-25T05:13:59.244594322-05:00 AUDIT: id="894f92ef-0ccf-407d-b047-5278233f07f8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:13:59.245731 4678 audit.go:45] 2017-01-25T05:13:59.245715576-05:00 AUDIT: id="894f92ef-0ccf-407d-b047-5278233f07f8" response="200" I0125 05:13:59.245823 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (3.333888ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:59.246096 4678 controller.go:106] Found 0 cronjobs I0125 05:13:59.248174 4678 audit.go:125] 2017-01-25T05:13:59.24814362-05:00 AUDIT: id="5300d8d5-0854-4de1-bab4-f28bab814072" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:13:59.249065 4678 audit.go:45] 2017-01-25T05:13:59.249051254-05:00 AUDIT: id="5300d8d5-0854-4de1-bab4-f28bab814072" response="200" I0125 05:13:59.249138 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.777715ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:13:59.249417 4678 controller.go:114] Found 0 jobs I0125 05:13:59.249430 4678 controller.go:117] Found 0 groups I0125 05:13:59.261004 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:59.261037 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:59.262114 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Connection:[close] Content-Type:[text/html] Cache-Control:[no-cache]] 0xc4371990e0 -1 [] true false map[] 0xc43310d2c0 } I0125 05:13:59.262162 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:59.262245 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:13:59.262257 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:13:59.262844 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Connection:[close] Content-Type:[text/html] Cache-Control:[no-cache]] 
0xc4345ce1c0 -1 [] true false map[] 0xc43310d4a0 } I0125 05:13:59.262884 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:13:59.352856 4678 generic.go:145] GenericPLEG: b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f: exited -> unknown I0125 05:13:59.352882 4678 generic.go:145] GenericPLEG: d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/2e93a8e8d48d3b07f1bdd4e0205122011925c096da17cf574c94712edf48b56d: exited -> non-existent I0125 05:13:59.684658 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:13:59.692213 4678 container_gc.go:249] Removing container "1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48" name "postgresql-slave" I0125 05:13:59.692411 4678 generic.go:342] PLEG: Write status for postgresql-master-1-6jfgj/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-1-6jfgj", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus(nil), SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: Error response from daemon: {"message":"devmapper: Unknown device 5252cc7b5f2e9834ed81c8f25adf5f95a9ede193511871580dc75b99c8313b85"}) E0125 05:13:59.692456 4678 generic.go:238] PLEG: Ignoring events for pod postgresql-master-1-6jfgj/extended-test-postgresql-replication-1-34bbd-xd4g8: Error response from daemon: {"message":"devmapper: Unknown device 5252cc7b5f2e9834ed81c8f25adf5f95a9ede193511871580dc75b99c8313b85"} I0125 05:14:00.070764 4678 audit.go:125] 2017-01-25T05:14:00.070709793-05:00 AUDIT: id="04d5c3d8-2ac2-4fc7-a1ee-d02b0b0c54d6" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:00.072340 4678 audit.go:45] 2017-01-25T05:14:00.072321394-05:00 AUDIT: id="04d5c3d8-2ac2-4fc7-a1ee-d02b0b0c54d6" response="200" I0125 05:14:00.072431 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.443308ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:00.238680 4678 container_gc.go:249] Removing container "bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741" name "POD" I0125 05:14:00.239404 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42f5b29a0 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/containers/deployment/08cf920c Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/deployer-token-r7jj8 Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42ecf0000 NetworkSettings:0xc430f2f600} I0125 05:14:00.242404 4678 generic.go:342] PLEG: Write status for postgresql-master-2-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: 
&container.PodStatus{ID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-2-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42d1f82a0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:14:00.591899 4678 container_gc.go:249] Removing container "68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2" name "POD" I0125 05:14:00.934002 4678 container_gc.go:249] Removing container "3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28" name "deployment" I0125 05:14:01.068430 4678 audit.go:125] 2017-01-25T05:14:01.068378337-05:00 AUDIT: id="f057ae08-8f4f-4aae-9830-a7c511d248f6" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:01.069960 4678 audit.go:45] 2017-01-25T05:14:01.069942048-05:00 AUDIT: id="f057ae08-8f4f-4aae-9830-a7c511d248f6" response="200" I0125 05:14:01.070050 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.422894ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:01.246642 4678 generic.go:145] GenericPLEG: b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/0b1e46b84aab071653291cec3d46ee638a0c03f966eb614f8a7933cdae10475f: exited -> non-existent I0125 05:14:01.246666 4678 generic.go:145] GenericPLEG: b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094/bcd01b6cd0cf9f0a2f63a7790bb6170ba53eb5648dbba3119390dfe60fc07741: exited -> non-existent I0125 05:14:01.246673 4678 generic.go:145] GenericPLEG: d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28: exited -> unknown I0125 05:14:01.246685 4678 generic.go:145] GenericPLEG: b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/1122e1bd8a66675372636a6e7bfbeb24e94d87a78066af2dfa701b8ec98a4c48: exited -> non-existent I0125 05:14:01.246694 4678 generic.go:145] GenericPLEG: b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094/68dcb92bc1af0f5d01d6ff08e2c4025bdacdcc96fbf1925bfcecec1cbcdfe1a2: exited -> non-existent I0125 05:14:01.246702 4678 generic.go:333] PLEG: Delete status for pod "b6efaf68-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:14:01.526718 4678 generic.go:342] PLEG: Write status for postgresql-master-2-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-2-deploy", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus(nil), SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: Error response from daemon: {"message":"devmapper: Unknown device 5d60992435a833968fea0fe2f9d3b46cab616c6233205792c48424d05622679f"}) I0125 05:14:01.526766 4678 kubelet.go:1138] Container garbage collection succeeded E0125 05:14:01.526775 4678 generic.go:238] PLEG: Ignoring events for pod postgresql-master-2-deploy/extended-test-postgresql-replication-1-34bbd-xd4g8: Error response from daemon: {"message":"devmapper: Unknown device 5d60992435a833968fea0fe2f9d3b46cab616c6233205792c48424d05622679f"} I0125 05:14:01.526786 4678 generic.go:333] PLEG: Delete status for pod "b63d7ff7-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:14:01.684605 4678 kubelet.go:1858] 
SyncLoop (housekeeping) I0125 05:14:01.943634 4678 audit.go:125] 2017-01-25T05:14:01.943597857-05:00 AUDIT: id="39f6f848-24ec-48fd-8bb9-d514f737fff8" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:14:01.944654 4678 audit.go:45] 2017-01-25T05:14:01.944642778-05:00 AUDIT: id="39f6f848-24ec-48fd-8bb9-d514f737fff8" response="200" I0125 05:14:01.944723 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.335308ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:02.068126 4678 audit.go:125] 2017-01-25T05:14:02.068075062-05:00 AUDIT: id="70324448-52fa-41a1-be2f-8ddcf976c297" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:02.069431 4678 audit.go:45] 2017-01-25T05:14:02.069420176-05:00 AUDIT: id="70324448-52fa-41a1-be2f-8ddcf976c297" response="200" I0125 05:14:02.069502 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.977957ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:02.214532 4678 iptables.go:362] running iptables -N [KUBE-MARK-DROP -t nat] I0125 05:14:02.234168 4678 iptables.go:362] running iptables -C [KUBE-MARK-DROP -t nat -j MARK --set-xmark 0x00008000/0x00008000] I0125 05:14:02.253151 4678 iptables.go:362] running iptables -N [KUBE-FIREWALL -t filter] I0125 05:14:02.272007 4678 iptables.go:362] running iptables -C [KUBE-FIREWALL -t filter -m comment --comment kubernetes firewall for dropping marked packets -m mark --mark 0x00008000/0x00008000 -j DROP] I0125 05:14:02.291205 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -j KUBE-FIREWALL] I0125 05:14:02.310099 4678 iptables.go:362] running iptables -C [INPUT -t filter -j KUBE-FIREWALL] I0125 05:14:02.328970 4678 iptables.go:362] running iptables -N [KUBE-MARK-MASQ -t nat] I0125 05:14:02.347941 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:14:02.367498 4678 iptables.go:362] running iptables -C [KUBE-MARK-MASQ -t nat -j MARK --set-xmark 0x00004000/0x00004000] I0125 05:14:02.387468 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:14:02.406222 4678 iptables.go:362] running iptables -C [KUBE-POSTROUTING -t nat -m comment --comment kubernetes service traffic requiring SNAT -m mark --mark 0x00004000/0x00004000 -j MASQUERADE] I0125 05:14:02.485832 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:14:02.486414 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:02.498797 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } 
{POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:02.533338 4678 generic.go:145] GenericPLEG: d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094/3d4f90b50c8d1d7b71cde09219c4e4474c4e695427d8f33da80efbbb047c1e28: exited -> non-existent I0125 05:14:02.533378 4678 generic.go:333] PLEG: Delete status for pod "d71573a5-e2e6-11e6-a4b0-0e6a5cbf0094" I0125 05:14:02.553186 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:14:02.553215 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:02.904452 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:14:02.904927 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:02.905463 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:02.958008 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:14:02.958031 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:02.967174 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:02.967200 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:02.967912 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:02.967929 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:02.968879 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:02 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc4372343e0 0 [] true false map[] 0xc432b29860 } I0125 05:14:02.968943 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:02.969033 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:02 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc437234500 0 [] true false map[] 0xc432b29680 } I0125 05:14:02.969069 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:03.068224 4678 audit.go:125] 2017-01-25T05:14:03.068161456-05:00 AUDIT: id="c2da7e42-62c9-4b4c-b9c9-649d34e6af3d" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:03.069577 4678 audit.go:45] 2017-01-25T05:14:03.069566497-05:00 AUDIT: id="c2da7e42-62c9-4b4c-b9c9-649d34e6af3d" response="200" I0125 05:14:03.069657 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.096194ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:03.378137 4678 audit.go:125] 2017-01-25T05:14:03.378103218-05:00 AUDIT: id="a8dd5cc0-f0bb-4b15-aeb6-8284ece74d94" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:03.378572 4678 audit.go:45] 2017-01-25T05:14:03.378563397-05:00 AUDIT: id="a8dd5cc0-f0bb-4b15-aeb6-8284ece74d94" response="200" I0125 05:14:03.378893 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (998.844µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:03.572743 4678 reflector.go:273] pkg/controller/endpoint/endpoints_controller.go:160: forcing resync I0125 05:14:03.573094 4678 endpoints_controller.go:334] Finished syncing service "default/kubernetes" endpoints. 
(3.12µs) I0125 05:14:03.577304 4678 audit.go:125] 2017-01-25T05:14:03.577258825-05:00 AUDIT: id="89af9512-9c64-4ec5-95c4-6901c5697308" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:14:03.578381 4678 audit.go:45] 2017-01-25T05:14:03.578366372-05:00 AUDIT: id="89af9512-9c64-4ec5-95c4-6901c5697308" response="200" I0125 05:14:03.578466 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (3.712502ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:03.578782 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (5.087431ms) I0125 05:14:03.579140 4678 audit.go:125] 2017-01-25T05:14:03.579102481-05:00 AUDIT: id="26532c2a-73e7-453a-9031-521ce2ff2f17" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:14:03.579441 4678 audit.go:125] 2017-01-25T05:14:03.579413171-05:00 AUDIT: id="4431c265-9c28-47aa-8c5e-09f429167b85" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/router" I0125 05:14:03.579540 4678 audit.go:125] 2017-01-25T05:14:03.579509116-05:00 AUDIT: id="d0032542-c36d-4374-bdad-cedeac14b3ec" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:14:03.579752 4678 audit.go:125] 2017-01-25T05:14:03.579727151-05:00 AUDIT: id="fce4e71e-bd5c-40be-aa13-1d3a0e68f7db" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/docker-registry" I0125 05:14:03.580678 4678 audit.go:45] 2017-01-25T05:14:03.580664232-05:00 AUDIT: id="fce4e71e-bd5c-40be-aa13-1d3a0e68f7db" response="200" I0125 05:14:03.580748 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/docker-registry: (6.64825ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:03.581019 4678 audit.go:45] 2017-01-25T05:14:03.581006-05:00 AUDIT: id="d0032542-c36d-4374-bdad-cedeac14b3ec" response="200" I0125 05:14:03.581065 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (6.381977ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:03.581099 4678 endpoints_controller.go:334] Finished syncing service "default/docker-registry" endpoints. 
(8.181945ms) I0125 05:14:03.581193 4678 audit.go:45] 2017-01-25T05:14:03.581181094-05:00 AUDIT: id="4431c265-9c28-47aa-8c5e-09f429167b85" response="200" I0125 05:14:03.581245 4678 audit.go:45] 2017-01-25T05:14:03.58123398-05:00 AUDIT: id="26532c2a-73e7-453a-9031-521ce2ff2f17" response="200" I0125 05:14:03.581256 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/router: (7.050955ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:03.581287 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (6.13234ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:03.581534 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:14:03.581570 4678 endpoints_controller.go:334] Finished syncing service "default/router" endpoints. (8.44874ms) I0125 05:14:03.581662 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (8.255991ms) I0125 05:14:03.583443 4678 audit.go:125] 2017-01-25T05:14:03.583419647-05:00 AUDIT: id="9f92c906-42db-41be-99fc-30b74876c5f8" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:14:03.584880 4678 audit.go:45] 2017-01-25T05:14:03.584867052-05:00 AUDIT: id="9f92c906-42db-41be-99fc-30b74876c5f8" response="200" I0125 05:14:03.584942 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (3.030495ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:03.585153 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(11.588742ms) I0125 05:14:03.585465 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:14:03.585630 4678 proxier.go:804] Syncing iptables rules I0125 05:14:03.585643 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:14:03.594391 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:14:03.594486 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:14:03.594576 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:14:03.594585 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:14:03.594593 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master] I0125 05:14:03.594656 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:14:03.594665 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:14:03.594677 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:14:03.594688 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:14:03.604520 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:14:03.623516 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:03.641882 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:03.660677 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:03.671873 4678 reflector.go:273] pkg/controller/podautoscaler/horizontal.go:133: forcing resync I0125 05:14:03.679783 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:14:03.689505 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:03.702186 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:14:03.721307 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:14:03.740284 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:14:03.756711 4678 reflector.go:273] pkg/controller/disruption/disruption.go:284: forcing resync I0125 05:14:03.756786 4678 reflector.go:273] pkg/controller/disruption/disruption.go:281: forcing resync I0125 05:14:03.756808 4678 reflector.go:273] pkg/controller/disruption/disruption.go:283: forcing resync I0125 05:14:03.760300 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] 
:KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment 
"default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:14:03.760327 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:14:03.781460 4678 proxier.go:797] syncProxyRules took 195.824544ms I0125 05:14:03.781488 4678 proxier.go:566] OnEndpointsUpdate took 195.953199ms for 6 endpoints I0125 05:14:03.781535 4678 proxier.go:381] Received update notice: [] I0125 05:14:03.781571 4678 proxier.go:804] Syncing iptables rules I0125 05:14:03.781580 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:14:03.800473 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:14:03.812462 4678 reflector.go:273] pkg/controller/disruption/disruption.go:285: forcing resync I0125 05:14:03.819383 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:03.838719 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:03.851481 4678 reflector.go:273] pkg/controller/petset/pet_set.go:148: forcing resync I0125 05:14:03.857645 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j 
KUBE-SERVICES] I0125 05:14:03.876746 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:14:03.896730 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:14:03.915619 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:14:03.936979 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:14:03.956822 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment 
default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:14:03.956854 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:14:03.977852 4678 proxier.go:797] syncProxyRules took 196.273101ms I0125 05:14:03.977881 4678 proxier.go:431] OnServiceUpdate took 196.334918ms for 4 services I0125 05:14:03.988889 4678 reflector.go:273] pkg/controller/informers/factory.go:89: forcing resync I0125 05:14:04.068371 4678 audit.go:125] 
2017-01-25T05:14:04.068327685-05:00 AUDIT: id="4d8953ed-0300-4529-97f5-6aa147ab1744" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:04.069785 4678 audit.go:45] 2017-01-25T05:14:04.069773888-05:00 AUDIT: id="4d8953ed-0300-4529-97f5-6aa147ab1744" response="200" I0125 05:14:04.069867 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.252611ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:04.132950 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:14:04.133048 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:14:04.133068 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:04.133096 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:04.133113 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:14:04.133121 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:04.133128 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:04.133190 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:04.133209 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:14:04.133215 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:14:04.133220 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:14:04.133263 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:14:04.133273 4678 pv_controller.go:546] updating 
PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:14:04.133305 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:14:04.133310 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:04.133322 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:04.133332 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:04.133478 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:14:04.133515 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:14:04.133550 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:14:04.133567 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:04.133572 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:14:04.133586 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:04.133594 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:14:04.133598 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:14:04.133615 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:14:05.068346 4678 audit.go:125] 2017-01-25T05:14:05.068306901-05:00 AUDIT: id="f3c632fb-19e1-4e34-ab1e-545d42f86499" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:05.069799 4678 audit.go:45] 2017-01-25T05:14:05.069784896-05:00 AUDIT: id="f3c632fb-19e1-4e34-ab1e-545d42f86499" response="200" I0125 05:14:05.069883 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.30586ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:05.290667 4678 audit.go:125] 2017-01-25T05:14:05.290636034-05:00 AUDIT: id="cbfd1cf8-d22b-4653-8b49-c83c02a7084a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:14:05.291715 4678 audit.go:45] 2017-01-25T05:14:05.291704343-05:00 AUDIT: id="cbfd1cf8-d22b-4653-8b49-c83c02a7084a" response="200" I0125 05:14:05.291778 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.351153ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:05.684613 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:06.068532 4678 audit.go:125] 2017-01-25T05:14:06.06848543-05:00 AUDIT: id="3b7288bf-3546-413e-bcbb-f427b590ee8f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:06.069973 4678 audit.go:45] 2017-01-25T05:14:06.069961154-05:00 AUDIT: id="3b7288bf-3546-413e-bcbb-f427b590ee8f" response="200" I0125 05:14:06.070071 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.480176ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:07.068274 4678 audit.go:125] 2017-01-25T05:14:07.068231725-05:00 AUDIT: id="ad2a9243-5f3f-448c-80de-3ee071a61832" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:07.069509 4678 audit.go:45] 2017-01-25T05:14:07.069497863-05:00 AUDIT: id="ad2a9243-5f3f-448c-80de-3ee071a61832" response="200" I0125 05:14:07.069579 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.979555ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:07.072628 4678 proxier.go:804] Syncing iptables rules I0125 05:14:07.072644 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:14:07.092339 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:14:07.111066 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:07.129951 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:07.148838 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:07.167829 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:14:07.186731 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:14:07.205502 4678 iptables.go:298] 
running iptables-save [-t filter] I0125 05:14:07.224374 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:14:07.244461 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j 
KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:14:07.244499 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:14:07.265309 4678 proxier.go:797] syncProxyRules took 192.675217ms I0125 05:14:07.265348 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-CONTAINER -t nat] I0125 05:14:07.284392 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-CONTAINER] I0125 05:14:07.305474 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-HOST -t nat] I0125 05:14:07.325114 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j 
KUBE-PORTALS-HOST] I0125 05:14:07.343855 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-CONTAINER -t nat] I0125 05:14:07.362662 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-CONTAINER] I0125 05:14:07.381759 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-HOST -t nat] I0125 05:14:07.426781 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-HOST] I0125 05:14:07.452744 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-NON-LOCAL -t filter] I0125 05:14:07.472090 4678 iptables.go:362] running iptables -C [INPUT -t filter -m comment --comment Ensure that non-local NodePort traffic can flow -j KUBE-NODEPORT-NON-LOCAL] I0125 05:14:07.684666 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:07.864547 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:14:07.864568 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:14:07.975463 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:14:08.015748 4678 audit.go:125] 2017-01-25T05:14:08.015708379-05:00 AUDIT: id="cd6bb048-08bc-4cab-916a-a1214f084633" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:14:08.016188 4678 audit.go:45] 2017-01-25T05:14:08.0161775-05:00 AUDIT: id="cd6bb048-08bc-4cab-916a-a1214f084633" response="200" I0125 05:14:08.016540 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.047387ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:08.068752 4678 audit.go:125] 2017-01-25T05:14:08.068694638-05:00 AUDIT: id="4659ff75-47bb-48a4-ae8a-57c40a14007a" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:08.070431 4678 audit.go:45] 2017-01-25T05:14:08.070413484-05:00 AUDIT: id="4659ff75-47bb-48a4-ae8a-57c40a14007a" response="200" I0125 05:14:08.070533 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.732821ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:08.086163 4678 audit.go:125] 2017-01-25T05:14:08.086129239-05:00 AUDIT: id="b2de021d-9c05-440b-8d84-20f9baa283b9" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:14:08.088573 4678 audit.go:45] 2017-01-25T05:14:08.088557676-05:00 AUDIT: id="b2de021d-9c05-440b-8d84-20f9baa283b9" response="200" I0125 05:14:08.089091 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (3.167315ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:08.089619 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:14:08.380141 4678 
audit.go:125] 2017-01-25T05:14:08.380107989-05:00 AUDIT: id="b9edd427-1b8d-4a36-a62f-3fbaebe2a73f" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:08.380562 4678 audit.go:45] 2017-01-25T05:14:08.380552777-05:00 AUDIT: id="b9edd427-1b8d-4a36-a62f-3fbaebe2a73f" response="200" I0125 05:14:08.380882 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (984.085µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:08.381139 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:14:08.542298 4678 panics.go:76] GET /apis/extensions/v1beta1/watch/replicasets?resourceVersion=4&timeoutSeconds=490: (8m10.003022109s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:disruption-controller] 172.18.7.222:50846] I0125 05:14:08.542568 4678 reflector.go:392] pkg/controller/disruption/disruption.go:284: Watch close - *extensions.ReplicaSet total 0 items received I0125 05:14:08.544841 4678 audit.go:125] 2017-01-25T05:14:08.54481041-05:00 AUDIT: id="f4bf0f9c-adc5-40e8-88c8-2964e4b92a9e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:disruption-controller" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/watch/replicasets?resourceVersion=4&timeoutSeconds=427" I0125 05:14:08.545185 4678 audit.go:45] 2017-01-25T05:14:08.545175872-05:00 AUDIT: id="f4bf0f9c-adc5-40e8-88c8-2964e4b92a9e" response="200" I0125 05:14:09.068223 4678 audit.go:125] 2017-01-25T05:14:09.068168715-05:00 AUDIT: id="37e8d779-ceb1-40eb-994f-43fac3607a57" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:09.069589 4678 audit.go:45] 2017-01-25T05:14:09.069577895-05:00 AUDIT: id="37e8d779-ceb1-40eb-994f-43fac3607a57" response="200" I0125 05:14:09.069672 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.158966ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:09.251951 4678 audit.go:125] 2017-01-25T05:14:09.251912686-05:00 AUDIT: id="99276986-a313-4092-b93e-258f6ef51875" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:14:09.252825 4678 audit.go:45] 2017-01-25T05:14:09.252814669-05:00 AUDIT: id="99276986-a313-4092-b93e-258f6ef51875" response="200" I0125 05:14:09.252901 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.849136ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:09.253147 4678 controller.go:106] Found 0 cronjobs I0125 05:14:09.254920 4678 audit.go:125] 2017-01-25T05:14:09.25489965-05:00 AUDIT: id="24ff2ab0-6f77-4c67-b0cb-227a1f378c52" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:14:09.255683 4678 audit.go:45] 2017-01-25T05:14:09.255673456-05:00 AUDIT: id="24ff2ab0-6f77-4c67-b0cb-227a1f378c52" response="200" I0125 
05:14:09.255743 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.333767ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:09.255955 4678 controller.go:114] Found 0 jobs I0125 05:14:09.255963 4678 controller.go:117] Found 0 groups I0125 05:14:09.260983 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:09.261000 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:09.261676 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:09.261693 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:09.262040 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc437fa3c20 -1 [] true false map[] 0xc43ae3b3b0 } I0125 05:14:09.262071 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:09.262379 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc437ad2680 -1 [] true false map[] 0xc432ea4000 } I0125 05:14:09.262403 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:09.684606 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:10.068099 4678 audit.go:125] 2017-01-25T05:14:10.068058022-05:00 AUDIT: id="452d01f1-6ec2-4db8-a80b-870d9cdbecfa" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:10.069376 4678 audit.go:45] 2017-01-25T05:14:10.069365597-05:00 AUDIT: id="452d01f1-6ec2-4db8-a80b-870d9cdbecfa" response="200" I0125 05:14:10.069456 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.859443ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:10.590232 4678 panics.go:76] GET /oapi/v1/watch/builds?resourceVersion=10222&timeoutSeconds=347: (5m47.00088377s) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:14:10.590479 4678 reflector.go:392] github.com/openshift/origin/pkg/build/controller/factory/factory.go:207: Watch close - *api.Build total 8 items received I0125 05:14:10.591079 4678 audit.go:125] 2017-01-25T05:14:10.59104961-05:00 AUDIT: id="7fb1f3ad-7836-4dca-a992-b77f5580c37f" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/watch/builds?resourceVersion=10734&timeoutSeconds=457" I0125 05:14:10.591585 4678 audit.go:45] 2017-01-25T05:14:10.591575272-05:00 AUDIT: id="7fb1f3ad-7836-4dca-a992-b77f5580c37f" response="200" I0125 05:14:11.068054 4678 audit.go:125] 2017-01-25T05:14:11.068014987-05:00 AUDIT: id="60976554-ad53-4b91-af4b-1c5cbc8e2300" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:11.069331 4678 audit.go:45] 2017-01-25T05:14:11.069318546-05:00 AUDIT: id="60976554-ad53-4b91-af4b-1c5cbc8e2300" response="200" I0125 05:14:11.069410 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.861528ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:11.684616 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:14:11.684660 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:11.945800 4678 audit.go:125] 2017-01-25T05:14:11.945758316-05:00 AUDIT: id="77ccd448-d885-4b54-ae07-01a19b4212ed" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:14:11.946948 4678 audit.go:45] 2017-01-25T05:14:11.946936865-05:00 AUDIT: id="77ccd448-d885-4b54-ae07-01a19b4212ed" response="200" I0125 05:14:11.947021 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.471871ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:12.068314 4678 audit.go:125] 2017-01-25T05:14:12.068264457-05:00 AUDIT: id="8132e7e5-6fd8-4d0a-b420-7067d75948af" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:12.069696 4678 audit.go:45] 2017-01-25T05:14:12.069684579-05:00 AUDIT: id="8132e7e5-6fd8-4d0a-b420-7067d75948af" response="200" I0125 05:14:12.069776 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.169258ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:12.485820 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:14:12.486242 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:12.498901 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:12.571733 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:14:12.571764 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:12.904423 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:14:12.904841 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:12.905443 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:12.958256 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:14:12.958281 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:12.967170 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:12.967196 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:12.967935 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:12.967953 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:12.968635 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:12 GMT] Content-Length:[0]] 0xc42dc7e700 0 [] true false map[] 0xc434cd9b30 } I0125 05:14:12.968685 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:12.968777 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:12 GMT]] 0xc42dc7e880 0 [] true false map[] 0xc42ba66a50 } I0125 05:14:12.968814 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:13.068145 4678 audit.go:125] 2017-01-25T05:14:13.068107059-05:00 AUDIT: id="a7d36fa3-484b-4121-b557-298cdf1338ca" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:13.069454 4678 audit.go:45] 2017-01-25T05:14:13.069443117-05:00 AUDIT: id="a7d36fa3-484b-4121-b557-298cdf1338ca" response="200" I0125 05:14:13.069524 4678 panics.go:76] 
GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.984997ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:13.382041 4678 audit.go:125] 2017-01-25T05:14:13.382001421-05:00 AUDIT: id="c30eab11-9855-4fa4-8f54-47df04fd8e13" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:13.382469 4678 audit.go:45] 2017-01-25T05:14:13.382459332-05:00 AUDIT: id="c30eab11-9855-4fa4-8f54-47df04fd8e13" response="200" I0125 05:14:13.382794 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.013849ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:13.684618 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:14.008697 4678 gc_controller.go:175] GC'ing orphaned I0125 05:14:14.008718 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. I0125 05:14:14.068154 4678 audit.go:125] 2017-01-25T05:14:14.06810811-05:00 AUDIT: id="7706aff8-2f50-4aa0-9332-5d25d8d33a38" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:14.069529 4678 audit.go:45] 2017-01-25T05:14:14.069516953-05:00 AUDIT: id="7706aff8-2f50-4aa0-9332-5d25d8d33a38" response="200" I0125 05:14:14.069613 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.05745ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:14.094138 4678 panics.go:76] GET /api/v1/watch/serviceaccounts?resourceVersion=10081&timeoutSeconds=376: (6m16.001089278s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:14.094385 4678 reflector.go:392] github.com/openshift/origin/vendor/k8s.io/kubernetes/plugin/pkg/admission/serviceaccount/admission.go:103: Watch close - *api.ServiceAccount total 69 items received I0125 05:14:14.094983 4678 audit.go:125] 2017-01-25T05:14:14.094948996-05:00 AUDIT: id="46f3df6b-2b6a-4678-9b3d-e4695928cf0b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/serviceaccounts?resourceVersion=10932&timeoutSeconds=450" I0125 05:14:14.095429 4678 audit.go:45] 2017-01-25T05:14:14.095419198-05:00 AUDIT: id="46f3df6b-2b6a-4678-9b3d-e4695928cf0b" response="200" I0125 05:14:15.068193 4678 audit.go:125] 2017-01-25T05:14:15.068143417-05:00 AUDIT: id="fdf588d5-abe1-44b3-b984-bc1be44b2693" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:15.069484 4678 audit.go:45] 2017-01-25T05:14:15.069472614-05:00 AUDIT: id="fdf588d5-abe1-44b3-b984-bc1be44b2693" response="200" I0125 05:14:15.069562 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.9658ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 
172.18.7.222:50942] I0125 05:14:15.249160 4678 panics.go:76] GET /api/v1/watch/namespaces/default/services?fieldSelector=metadata.name%3Ddocker-registry&resourceVersion=10097&timeoutSeconds=403: (6m43.000885867s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:15.249455 4678 reflector.go:392] github.com/openshift/origin/pkg/serviceaccounts/controllers/docker_registry_service.go:132: Watch close - *api.Service total 0 items received I0125 05:14:15.250069 4678 audit.go:125] 2017-01-25T05:14:15.25003784-05:00 AUDIT: id="f5e278ec-b84d-4aca-9008-095817960237" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/watch/namespaces/default/services?fieldSelector=metadata.name%3Ddocker-registry&resourceVersion=10097&timeoutSeconds=537" I0125 05:14:15.250571 4678 audit.go:45] 2017-01-25T05:14:15.250560371-05:00 AUDIT: id="f5e278ec-b84d-4aca-9008-095817960237" response="200" I0125 05:14:15.297009 4678 audit.go:125] 2017-01-25T05:14:15.296975008-05:00 AUDIT: id="4d6c092c-04f8-432f-971a-8d5f3dc10e98" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:14:15.298013 4678 audit.go:45] 2017-01-25T05:14:15.298001574-05:00 AUDIT: id="4d6c092c-04f8-432f-971a-8d5f3dc10e98" response="200" I0125 05:14:15.298102 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.324396ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:15.684617 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:16.068062 4678 audit.go:125] 2017-01-25T05:14:16.068019517-05:00 AUDIT: id="065b1088-38ff-466b-a25e-77d468f043b2" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:16.069396 4678 audit.go:45] 2017-01-25T05:14:16.069385328-05:00 AUDIT: id="065b1088-38ff-466b-a25e-77d468f043b2" response="200" I0125 05:14:16.069468 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.914744ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:17.068234 4678 audit.go:125] 2017-01-25T05:14:17.068173173-05:00 AUDIT: id="b8c3b5a4-47fd-4699-a8c5-5f1c5d84ef3e" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:17.069577 4678 audit.go:45] 2017-01-25T05:14:17.069561187-05:00 AUDIT: id="b8c3b5a4-47fd-4699-a8c5-5f1c5d84ef3e" response="200" I0125 05:14:17.069650 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.120115ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:17.684616 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:17.988151 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:14:17.988168 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:14:18.068581 4678 audit.go:125] 2017-01-25T05:14:18.068526402-05:00 AUDIT: id="350d1d67-b4c1-4be2-850f-d3b295801400" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:18.070187 4678 audit.go:45] 2017-01-25T05:14:18.070171614-05:00 AUDIT: id="350d1d67-b4c1-4be2-850f-d3b295801400" response="200" I0125 05:14:18.070296 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.592986ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:18.078800 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:14:18.090522 4678 audit.go:125] 2017-01-25T05:14:18.090483795-05:00 AUDIT: id="eb752fc8-8d0b-4523-ae4d-5a20fe9c86ae" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:14:18.090940 4678 audit.go:45] 2017-01-25T05:14:18.090929925-05:00 AUDIT: id="eb752fc8-8d0b-4523-ae4d-5a20fe9c86ae" response="200" I0125 05:14:18.091281 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.018048ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:18.158702 4678 audit.go:125] 2017-01-25T05:14:18.158672944-05:00 AUDIT: id="71611292-8ae2-4ac1-9ea2-769c7a128f47" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:14:18.160876 4678 audit.go:45] 2017-01-25T05:14:18.160865453-05:00 AUDIT: id="71611292-8ae2-4ac1-9ea2-769c7a128f47" response="200" I0125 05:14:18.161487 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (3.004403ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:18.161839 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:14:18.383957 4678 audit.go:125] 2017-01-25T05:14:18.383921902-05:00 AUDIT: id="1373a87e-1477-4fb2-b0e1-66c69fb34783" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:18.384400 4678 audit.go:45] 2017-01-25T05:14:18.384386979-05:00 AUDIT: id="1373a87e-1477-4fb2-b0e1-66c69fb34783" response="200" I0125 05:14:18.384750 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.054114ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:18.385027 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
I0125 05:14:19.068821 4678 audit.go:125] 2017-01-25T05:14:19.06877441-05:00 AUDIT: id="301a7b39-1f9a-49bc-8870-e943cf9ec55a" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:19.070317 4678 audit.go:45] 2017-01-25T05:14:19.070306297-05:00 AUDIT: id="301a7b39-1f9a-49bc-8870-e943cf9ec55a" response="200" I0125 05:14:19.070411 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.799584ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:19.133221 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:14:19.133305 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:14:19.133332 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:19.133354 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:19.133366 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:14:19.133376 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:19.133389 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:19.133456 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:19.133462 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:14:19.133467 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:14:19.133472 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:14:19.133510 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:14:19.133516 4678 pv_controller.go:546] updating 
PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:14:19.133560 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:14:19.133566 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:19.133578 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:19.133589 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:19.133742 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:14:19.133746 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:14:19.133798 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:14:19.133817 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:19.133823 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:14:19.133833 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:19.133840 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:14:19.133844 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:14:19.133848 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:14:19.258715 4678 audit.go:125] 2017-01-25T05:14:19.258667529-05:00 AUDIT: id="d0c4f384-10b7-47b1-90ca-f1f3c0c8cc5f" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:14:19.259596 4678 audit.go:45] 2017-01-25T05:14:19.259585654-05:00 AUDIT: id="d0c4f384-10b7-47b1-90ca-f1f3c0c8cc5f" response="200" I0125 05:14:19.259672 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (3.01893ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 
05:14:19.259962 4678 controller.go:106] Found 0 cronjobs I0125 05:14:19.261171 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:19.261211 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:19.261931 4678 audit.go:125] 2017-01-25T05:14:19.261897837-05:00 AUDIT: id="1f8a7c72-4a9f-4496-b91d-19435588d504" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:14:19.262067 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:19.262084 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:19.262607 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Connection:[close] Content-Type:[text/html] Cache-Control:[no-cache]] 0xc4387761a0 -1 [] true false map[] 0xc42aa5af00 } I0125 05:14:19.262653 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:19.263085 4678 audit.go:45] 2017-01-25T05:14:19.263075056-05:00 AUDIT: id="1f8a7c72-4a9f-4496-b91d-19435588d504" response="200" I0125 05:14:19.263158 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.967088ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:19.263200 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc4376470c0 -1 [] true false map[] 0xc42aa5b1d0 } I0125 05:14:19.263233 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:19.263387 4678 controller.go:114] Found 0 jobs I0125 05:14:19.263394 4678 controller.go:117] Found 0 groups I0125 05:14:19.684633 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:20.068268 4678 audit.go:125] 2017-01-25T05:14:20.068214867-05:00 AUDIT: id="e5b867a2-3713-44f8-95be-bff8556914cd" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:20.069598 4678 audit.go:45] 2017-01-25T05:14:20.069586462-05:00 AUDIT: id="e5b867a2-3713-44f8-95be-bff8556914cd" response="200" I0125 05:14:20.069671 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.104166ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:21.068292 4678 audit.go:125] 2017-01-25T05:14:21.068252875-05:00 AUDIT: id="25f08b43-5e01-46b4-8723-953dd6202aac" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:21.069641 4678 audit.go:45] 2017-01-25T05:14:21.0696298-05:00 AUDIT: id="25f08b43-5e01-46b4-8723-953dd6202aac" response="200" I0125 05:14:21.069712 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.110208ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:21.684604 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:21.948133 4678 audit.go:125] 2017-01-25T05:14:21.948098215-05:00 AUDIT: id="bde34631-f49c-4886-8d63-7c558bfc0d3d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:14:21.949265 4678 audit.go:45] 2017-01-25T05:14:21.949253937-05:00 AUDIT: id="bde34631-f49c-4886-8d63-7c558bfc0d3d" response="200" I0125 05:14:21.949910 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (2.022828ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:22.068146 4678 audit.go:125] 2017-01-25T05:14:22.068108121-05:00 AUDIT: id="16ea1a6c-3117-4377-8c7a-c687233e621c" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:22.069414 4678 audit.go:45] 2017-01-25T05:14:22.069402798-05:00 AUDIT: id="16ea1a6c-3117-4377-8c7a-c687233e621c" response="200" I0125 05:14:22.069486 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.881618ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:22.485855 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:14:22.486289 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:22.498926 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:22.577723 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:14:22.577761 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:22.904466 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:14:22.904806 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:22.905441 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:22.961158 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:14:22.961180 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:22.967133 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:22.967156 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:22.968381 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:22.968397 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:22.968674 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Date:[Wed, 25 Jan 2017 10:14:22 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache]] 0xc432e82d80 0 [] true false map[] 0xc42e9f23c0 } I0125 05:14:22.968737 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:22.969153 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:22 GMT] Content-Length:[0]] 0xc432e82fa0 0 [] true false map[] 0xc42e9f2690 } I0125 05:14:22.969189 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:23.068424 4678 audit.go:125] 2017-01-25T05:14:23.068376775-05:00 AUDIT: id="52ec6e66-d1a4-4ec8-ac9c-fc685c3bb5c7" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:23.069863 4678 audit.go:45] 2017-01-25T05:14:23.069852228-05:00 AUDIT: id="52ec6e66-d1a4-4ec8-ac9c-fc685c3bb5c7" response="200" I0125 05:14:23.069942 4678 panics.go:76] 
GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.372932ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:23.385965 4678 audit.go:125] 2017-01-25T05:14:23.385933951-05:00 AUDIT: id="f8a79586-2b87-4164-9638-af73b22fa728" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:23.386387 4678 audit.go:45] 2017-01-25T05:14:23.386378405-05:00 AUDIT: id="f8a79586-2b87-4164-9638-af73b22fa728" response="200" I0125 05:14:23.386710 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.005346ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:23.684638 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:24.068087 4678 audit.go:125] 2017-01-25T05:14:24.068048566-05:00 AUDIT: id="eb147e75-3b45-4d13-8813-66f433ca0080" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:24.069433 4678 audit.go:45] 2017-01-25T05:14:24.06942089-05:00 AUDIT: id="eb147e75-3b45-4d13-8813-66f433ca0080" response="200" I0125 05:14:24.069510 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.927343ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:25.077634 4678 audit.go:125] 2017-01-25T05:14:25.077568236-05:00 AUDIT: id="4bf3f82f-e206-41f8-9369-6e52b11616ba" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:25.079662 4678 audit.go:45] 2017-01-25T05:14:25.079639613-05:00 AUDIT: id="4bf3f82f-e206-41f8-9369-6e52b11616ba" response="200" I0125 05:14:25.079771 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (9.651177ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:25.303477 4678 audit.go:125] 2017-01-25T05:14:25.303442311-05:00 AUDIT: id="c3972681-7fd5-47e6-9f96-6e946d0346f1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:14:25.304573 4678 audit.go:45] 2017-01-25T05:14:25.304556395-05:00 AUDIT: id="c3972681-7fd5-47e6-9f96-6e946d0346f1" response="200" I0125 05:14:25.304638 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.411581ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:25.684623 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:26.068354 4678 audit.go:125] 2017-01-25T05:14:26.068312772-05:00 AUDIT: id="2df9e0ca-1388-4696-8151-c36a15dbe6df" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:26.069788 4678 audit.go:45] 2017-01-25T05:14:26.069770056-05:00 AUDIT: id="2df9e0ca-1388-4696-8151-c36a15dbe6df" response="200" I0125 05:14:26.069871 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.293044ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:26.523425 4678 panics.go:76] GET /apis/extensions/v1beta1/watch/deployments?resourceVersion=4&timeoutSeconds=428: (7m8.003094656s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:disruption-controller] 172.18.7.222:50846] I0125 05:14:26.523721 4678 reflector.go:392] pkg/controller/disruption/disruption.go:285: Watch close - *extensions.Deployment total 0 items received I0125 05:14:26.525960 4678 audit.go:125] 2017-01-25T05:14:26.525915966-05:00 AUDIT: id="6c2efb9b-2390-4109-9e95-00d0a387f323" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:disruption-controller" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/watch/deployments?resourceVersion=4&timeoutSeconds=352" I0125 05:14:26.526347 4678 audit.go:45] 2017-01-25T05:14:26.526333204-05:00 AUDIT: id="6c2efb9b-2390-4109-9e95-00d0a387f323" response="200" 2017-01-25 05:14:26.959769 I | mvcc: store.index: compact 10478 I0125 05:14:26.961460 4678 compact.go:159] etcd: compacted rev (10478), endpoints ([https://172.18.7.222:4001]) 2017-01-25 05:14:26.962131 I | mvcc: finished scheduled compaction at 10478 (took 906.262µs) I0125 05:14:27.068347 4678 audit.go:125] 2017-01-25T05:14:27.06830736-05:00 AUDIT: id="72818194-17fb-4fb0-890f-3b412e1cbe26" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:27.069661 4678 audit.go:45] 2017-01-25T05:14:27.069648351-05:00 AUDIT: id="72818194-17fb-4fb0-890f-3b412e1cbe26" response="200" I0125 05:14:27.069729 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.112055ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:27.684657 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:28.068110 4678 audit.go:125] 2017-01-25T05:14:28.068067839-05:00 AUDIT: id="f2b529cf-ddae-4a0c-ab95-51a3e0dec2e6" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:28.069357 4678 audit.go:45] 2017-01-25T05:14:28.069342362-05:00 AUDIT: id="f2b529cf-ddae-4a0c-ab95-51a3e0dec2e6" response="200" I0125 05:14:28.069437 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.840226ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:28.099006 4678 conversion.go:134] failed to handle multiple devices for 
container. Skipping Filesystem stats I0125 05:14:28.099026 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:14:28.163288 4678 audit.go:125] 2017-01-25T05:14:28.163236272-05:00 AUDIT: id="bb08d524-a6d3-4263-aaec-5b58e834abf0" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:14:28.163815 4678 audit.go:45] 2017-01-25T05:14:28.163797849-05:00 AUDIT: id="bb08d524-a6d3-4263-aaec-5b58e834abf0" response="200" I0125 05:14:28.164178 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.258336ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:28.201144 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:14:28.243844 4678 audit.go:125] 2017-01-25T05:14:28.243812945-05:00 AUDIT: id="597fe98a-1278-49d6-a861-bc684345f959" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:14:28.245880 4678 audit.go:45] 2017-01-25T05:14:28.245860909-05:00 AUDIT: id="597fe98a-1278-49d6-a861-bc684345f959" response="200" I0125 05:14:28.246166 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.560261ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:28.247073 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:14:28.387956 4678 audit.go:125] 2017-01-25T05:14:28.38792552-05:00 AUDIT: id="0e1b54c5-464f-4285-97e5-280460bcc1bb" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:28.388389 4678 audit.go:45] 2017-01-25T05:14:28.388379728-05:00 AUDIT: id="0e1b54c5-464f-4285-97e5-280460bcc1bb" response="200" I0125 05:14:28.388670 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (942.621µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:28.389008 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
I0125 05:14:29.068169 4678 audit.go:125] 2017-01-25T05:14:29.068123874-05:00 AUDIT: id="33c48c8f-19ef-4c9a-a611-fda6125ba44e" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:29.069500 4678 audit.go:45] 2017-01-25T05:14:29.069487867-05:00 AUDIT: id="33c48c8f-19ef-4c9a-a611-fda6125ba44e" response="200" I0125 05:14:29.069579 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.047893ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:29.260985 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:29.261013 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:29.261676 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:29.261693 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:29.263361 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc436e93c20 -1 [] true false map[] 0xc430cf43c0 } I0125 05:14:29.263400 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:29.263353 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42df8f8a0 -1 [] true false map[] 0xc42e52dd10 } I0125 05:14:29.263427 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:29.265713 4678 audit.go:125] 2017-01-25T05:14:29.265685121-05:00 AUDIT: id="2f99e337-07aa-466d-a5ce-df1d0c27708b" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:14:29.266558 4678 audit.go:45] 2017-01-25T05:14:29.266547164-05:00 AUDIT: id="2f99e337-07aa-466d-a5ce-df1d0c27708b" response="200" I0125 05:14:29.266627 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.724381ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:29.266860 4678 controller.go:106] Found 0 cronjobs I0125 05:14:29.268698 4678 audit.go:125] 2017-01-25T05:14:29.26867935-05:00 AUDIT: id="2f68d66f-5be9-48ca-8aa4-76ce4dd82f66" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:14:29.269511 4678 audit.go:45] 2017-01-25T05:14:29.269501294-05:00 AUDIT: id="2f68d66f-5be9-48ca-8aa4-76ce4dd82f66" response="200" I0125 05:14:29.269575 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.474015ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:29.269771 4678 controller.go:114] Found 0 jobs I0125 05:14:29.269778 4678 controller.go:117] Found 0 groups I0125 05:14:29.684601 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:30.069513 4678 audit.go:125] 
2017-01-25T05:14:30.069469395-05:00 AUDIT: id="8ba7a783-c97b-4380-9301-bdf2f49acb68" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:30.070841 4678 audit.go:45] 2017-01-25T05:14:30.070827681-05:00 AUDIT: id="8ba7a783-c97b-4380-9301-bdf2f49acb68" response="200" I0125 05:14:30.070930 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.573721ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:30.684639 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:14:30.684708 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:30.684866 4678 status_manager.go:312] Ignoring same status for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:13:02 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:13:12 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:13:02 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.2 StartTime:2017-01-25 05:13:02 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running:0xc42ef88b20 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d}]} I0125 05:14:30.684960 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:30.736784 4678 audit.go:125] 2017-01-25T05:14:30.736747058-05:00 AUDIT: id="93bd99cd-7a6d-41e6-b8c4-062354242c89" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim" I0125 05:14:30.737942 4678 secret.go:179] Setting up volume default-token-0g2nw for pod daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:14:30.738011 4678 audit.go:45] 2017-01-25T05:14:30.737997302-05:00 AUDIT: id="93bd99cd-7a6d-41e6-b8c4-062354242c89" response="200" I0125 05:14:30.738118 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim: (1.579693ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:30.738645 4678 audit.go:125] 2017-01-25T05:14:30.738612499-05:00 AUDIT: id="11079ab5-d9be-42c5-a55d-82075b1c5fed" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:14:30.738903 4678 audit.go:125] 2017-01-25T05:14:30.738875368-05:00 AUDIT: id="5afed09d-4023-480d-a5bd-908123504e49" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:14:30.739800 4678 audit.go:45] 2017-01-25T05:14:30.739786395-05:00 AUDIT: id="11079ab5-d9be-42c5-a55d-82075b1c5fed" response="200" I0125 05:14:30.739801 4678 audit.go:45] 2017-01-25T05:14:30.739792224-05:00 AUDIT: id="5afed09d-4023-480d-a5bd-908123504e49" response="200" I0125 05:14:30.739949 4678 panics.go:76] GET /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (1.307907ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:30.740129 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (1.779571ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:30.740289 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:14:30.740489 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:14:30.740590 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:14:30.985209 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:30.985854 4678 audit.go:125] 2017-01-25T05:14:30.985816584-05:00 AUDIT: id="3e517f94-ad67-41c4-a497-039693308f9a" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:14:30.986974 4678 audit.go:45] 2017-01-25T05:14:30.986961507-05:00 AUDIT: id="3e517f94-ad67-41c4-a497-039693308f9a" response="200" I0125 05:14:30.987140 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (1.527619ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:30.987379 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:30.987441 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:30.987454 4678 docker_manager.go:1999] pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql-master" exists as ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d I0125 05:14:30.987553 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650:-1 ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d:0]} I0125 05:14:31.068094 4678 audit.go:125] 2017-01-25T05:14:31.06805333-05:00 AUDIT: id="891b00d9-3e21-4791-9a7f-1552efdcc8a5" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:31.069497 4678 audit.go:45] 2017-01-25T05:14:31.069485804-05:00 AUDIT: id="891b00d9-3e21-4791-9a7f-1552efdcc8a5" response="200" I0125 05:14:31.069577 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.042372ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:31.684622 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:14:31.684668 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:31.684818 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:31.685025 4678 status_manager.go:312] Ignoring same 
status for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:25 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.8 StartTime:2017-01-25 05:12:02 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql State:{Waiting: Running:0xc42da7f3e0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ImageID:docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ContainerID:docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208}]} I0125 05:14:31.685154 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:31.740092 4678 secret.go:179] Setting up volume default-token-0g2nw for pod b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:14:31.740840 4678 audit.go:125] 2017-01-25T05:14:31.740804647-05:00 AUDIT: id="ee4f923e-ee7e-4a82-8360-77211952bdae" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:14:31.741979 4678 audit.go:45] 2017-01-25T05:14:31.741968411-05:00 AUDIT: id="ee4f923e-ee7e-4a82-8360-77211952bdae" response="200" I0125 05:14:31.742258 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (1.673787ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:31.742462 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:14:31.742653 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:14:31.742748 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:14:31.951110 4678 audit.go:125] 2017-01-25T05:14:31.951072475-05:00 AUDIT: id="9a0e6b3c-f4d5-414b-ac3e-65cd7926617c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:14:31.952250 4678 audit.go:45] 2017-01-25T05:14:31.952238971-05:00 AUDIT: id="9a0e6b3c-f4d5-414b-ac3e-65cd7926617c" response="200" I0125 05:14:31.952326 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.46089ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:31.985358 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:31.986014 4678 audit.go:125] 2017-01-25T05:14:31.985975301-05:00 AUDIT: id="6c35d0bf-843b-44bf-97cb-38e9272224f0" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:14:31.987169 4678 audit.go:45] 2017-01-25T05:14:31.987157685-05:00 AUDIT: id="6c35d0bf-843b-44bf-97cb-38e9272224f0" response="200" I0125 05:14:31.987373 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (1.60596ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:31.987600 4678 docker_manager.go:1938] Found pod infra container for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:31.987671 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:31.987686 4678 docker_manager.go:1999] pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql" exists as 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208 I0125 05:14:31.987808 4678 docker_manager.go:2086] Got container changes for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017:-1 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208:0]} I0125 05:14:32.068511 4678 audit.go:125] 2017-01-25T05:14:32.068466595-05:00 AUDIT: id="37d4faa2-f3e9-484a-8e2b-74b12531a632" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:32.069977 4678 audit.go:45] 2017-01-25T05:14:32.069964104-05:00 AUDIT: id="37d4faa2-f3e9-484a-8e2b-74b12531a632" response="200" I0125 05:14:32.070068 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.489658ms) 200 
[[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:32.485780 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:14:32.486098 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:32.498773 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:32.552934 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:14:32.552975 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:32.904447 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:14:32.905415 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:32.906340 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:32.967678 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:32.967710 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:32.968273 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:32.968289 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:32.969140 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:32 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42a580900 0 [] true false map[] 0xc430cc7590 } I0125 05:14:32.969189 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:32.969277 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 
200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:32 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42a580a20 0 [] true false map[] 0xc430cc7770 } I0125 05:14:32.969321 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:32.978035 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:14:32.978077 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:33.068188 4678 audit.go:125] 2017-01-25T05:14:33.068130883-05:00 AUDIT: id="b8ef4077-75dd-4f8c-8aa0-130dae869241" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:33.069616 4678 audit.go:45] 2017-01-25T05:14:33.069602387-05:00 AUDIT: id="b8ef4077-75dd-4f8c-8aa0-130dae869241" response="200" I0125 05:14:33.069690 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.168173ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:33.389942 4678 audit.go:125] 2017-01-25T05:14:33.389908186-05:00 AUDIT: id="44efee27-c609-44df-9d7f-e124e3cfc0eb" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:33.390356 4678 audit.go:45] 2017-01-25T05:14:33.39034661-05:00 AUDIT: id="44efee27-c609-44df-9d7f-e124e3cfc0eb" response="200" I0125 05:14:33.390682 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (981.5µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:33.502349 4678 reflector.go:273] pkg/controller/namespace/namespace_controller.go:212: forcing resync I0125 05:14:33.555535 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:263: forcing resync I0125 05:14:33.555535 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:145: forcing resync I0125 05:14:33.562812 4678 resource_quota_controller.go:153] Resource quota controller queued all resource quota for full calculation of usage I0125 05:14:33.562830 4678 reflector.go:273] pkg/controller/resourcequota/resource_quota_controller.go:229: forcing resync I0125 05:14:33.573033 4678 reflector.go:273] pkg/controller/endpoint/endpoints_controller.go:160: forcing resync I0125 05:14:33.573268 4678 endpoints_controller.go:334] Finished syncing service "default/kubernetes" endpoints. 
(2.047µs) I0125 05:14:33.577553 4678 audit.go:125] 2017-01-25T05:14:33.577497022-05:00 AUDIT: id="d5db6b43-6341-4cf2-ad13-f5e9f8221294" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:14:33.578639 4678 audit.go:45] 2017-01-25T05:14:33.578621524-05:00 AUDIT: id="d5db6b43-6341-4cf2-ad13-f5e9f8221294" response="200" I0125 05:14:33.578734 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (3.58557ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:33.579099 4678 audit.go:125] 2017-01-25T05:14:33.57906682-05:00 AUDIT: id="4b24767f-1ec8-4677-bfda-198cc6c15fbf" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/docker-registry" I0125 05:14:33.579382 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (5.898911ms) I0125 05:14:33.579533 4678 audit.go:125] 2017-01-25T05:14:33.579498758-05:00 AUDIT: id="ae52d7da-c945-41ff-b3fb-bc217c57dea8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:14:33.579533 4678 audit.go:125] 2017-01-25T05:14:33.579503504-05:00 AUDIT: id="0ad4297e-83a2-4b9a-85a3-56278e57fa02" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:14:33.579797 4678 audit.go:125] 2017-01-25T05:14:33.579770962-05:00 AUDIT: id="5ca02d73-38f8-4597-b9b6-e17dcec7c1f2" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/router" I0125 05:14:33.580977 4678 audit.go:45] 2017-01-25T05:14:33.580951188-05:00 AUDIT: id="5ca02d73-38f8-4597-b9b6-e17dcec7c1f2" response="200" I0125 05:14:33.581006 4678 audit.go:45] 2017-01-25T05:14:33.580995604-05:00 AUDIT: id="ae52d7da-c945-41ff-b3fb-bc217c57dea8" response="200" I0125 05:14:33.581060 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (6.346167ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:33.581093 4678 audit.go:45] 2017-01-25T05:14:33.581082144-05:00 AUDIT: id="0ad4297e-83a2-4b9a-85a3-56278e57fa02" response="200" I0125 05:14:33.581135 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (6.489891ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:33.581244 4678 panics.go:76] GET 
/api/v1/namespaces/default/endpoints/router: (7.09468ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:33.581341 4678 audit.go:45] 2017-01-25T05:14:33.581329325-05:00 AUDIT: id="4b24767f-1ec8-4677-bfda-198cc6c15fbf" response="200" I0125 05:14:33.581386 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/docker-registry: (7.336923ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:33.581578 4678 endpoints_controller.go:334] Finished syncing service "default/docker-registry" endpoints. (8.388787ms) I0125 05:14:33.581579 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (7.937309ms) I0125 05:14:33.581639 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:14:33.581671 4678 endpoints_controller.go:334] Finished syncing service "default/router" endpoints. (8.375458ms) I0125 05:14:33.583702 4678 audit.go:125] 2017-01-25T05:14:33.583659628-05:00 AUDIT: id="a7ead6ad-6840-4246-ae3d-043d7c0c4c58" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:14:33.585231 4678 audit.go:45] 2017-01-25T05:14:33.585220307-05:00 AUDIT: id="a7ead6ad-6840-4246-ae3d-043d7c0c4c58" response="200" I0125 05:14:33.585281 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (3.270355ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:14:33.585508 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(11.925542ms) I0125 05:14:33.585855 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:14:33.586018 4678 proxier.go:804] Syncing iptables rules I0125 05:14:33.586030 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:14:33.595037 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:14:33.595210 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:14:33.595240 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:14:33.595266 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:14:33.595291 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:14:33.595311 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master] I0125 05:14:33.595340 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:14:33.595376 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:14:33.595404 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:14:33.605770 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:14:33.624523 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:33.643594 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:33.664504 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:33.673239 4678 reflector.go:273] pkg/controller/podautoscaler/horizontal.go:133: forcing resync I0125 05:14:33.683551 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:14:33.693398 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:33.705819 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:14:33.724669 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:14:33.744002 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:14:33.756969 4678 reflector.go:273] pkg/controller/disruption/disruption.go:283: forcing resync I0125 05:14:33.757158 4678 reflector.go:273] pkg/controller/disruption/disruption.go:284: forcing resync I0125 05:14:33.757209 4678 reflector.go:273] pkg/controller/disruption/disruption.go:281: forcing resync I0125 05:14:33.764301 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] 
:KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A 
KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:14:33.764336 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:14:33.784864 4678 reflector.go:273] github.com/openshift/origin/pkg/project/controller/factory.go:36: forcing resync I0125 05:14:33.784872 4678 proxier.go:797] syncProxyRules took 198.854159ms I0125 05:14:33.784899 4678 proxier.go:566] OnEndpointsUpdate took 198.96447ms for 6 endpoints I0125 05:14:33.784955 4678 proxier.go:381] Received update notice: [] I0125 05:14:33.784998 4678 proxier.go:804] Syncing iptables rules I0125 05:14:33.785009 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:14:33.803631 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:14:33.812847 4678 reflector.go:273] pkg/controller/disruption/disruption.go:285: forcing resync I0125 05:14:33.822158 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:33.840503 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:33.851932 4678 reflector.go:273] pkg/controller/petset/pet_set.go:148: forcing resync I0125 
05:14:33.859827 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:33.878403 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:14:33.897131 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:14:33.916573 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:14:33.937169 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:14:33.957387 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 
--dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:14:33.957421 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:14:33.977884 4678 proxier.go:797] syncProxyRules took 192.877294ms I0125 05:14:33.977915 4678 proxier.go:431] OnServiceUpdate took 192.945209ms for 4 services I0125 05:14:33.989093 4678 
reflector.go:273] pkg/controller/informers/factory.go:89: forcing resync I0125 05:14:34.009143 4678 gc_controller.go:175] GC'ing orphaned I0125 05:14:34.009162 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. I0125 05:14:34.068473 4678 audit.go:125] 2017-01-25T05:14:34.068434456-05:00 AUDIT: id="789cfbbe-7fe6-40fa-bc49-d06d9c2e841d" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:34.069946 4678 audit.go:45] 2017-01-25T05:14:34.069934636-05:00 AUDIT: id="789cfbbe-7fe6-40fa-bc49-d06d9c2e841d" response="200" I0125 05:14:34.070027 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.399559ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:34.133485 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:14:34.133553 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:14:34.133577 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:34.133599 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:34.133608 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:14:34.133616 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:34.133626 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:34.133711 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:34.133728 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:14:34.133733 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:14:34.133737 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:14:34.133765 4678 pv_controller.go:823] updating 
PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:14:34.133772 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:14:34.133811 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:14:34.133820 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:34.133839 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:34.133848 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:14:34.133854 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:34.133870 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:14:34.133899 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:14:34.133918 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:34.133928 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:14:34.133938 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:34.133946 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:14:34.133950 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:14:34.133955 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:14:35.068182 4678 audit.go:125] 2017-01-25T05:14:35.068140898-05:00 AUDIT: id="c66b1feb-d63f-4819-b5ad-41e4f09c1879" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:35.069502 4678 audit.go:45] 2017-01-25T05:14:35.069488332-05:00 AUDIT: id="c66b1feb-d63f-4819-b5ad-41e4f09c1879" response="200" I0125 05:14:35.069573 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.008838ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:35.143669 4678 panics.go:76] GET /api/v1/watch/events?fieldSelector=reason%3DNeedPods&resourceVersion=10191&timeoutSeconds=465: (7m45.002175742s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:unidling-controller] 172.18.7.222:50846] I0125 05:14:35.144034 4678 reflector.go:392] github.com/openshift/origin/pkg/unidling/controller/controller.go:195: Watch close - *api.Event total 0 items received I0125 05:14:35.146415 4678 audit.go:125] 2017-01-25T05:14:35.146381898-05:00 AUDIT: id="961aa3d3-290b-4237-916c-e1631bf53ecb" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:unidling-controller" as="" asgroups="" namespace="" uri="/api/v1/watch/events?fieldSelector=reason%3DNeedPods&resourceVersion=10191&timeoutSeconds=532" I0125 05:14:35.146779 4678 audit.go:45] 2017-01-25T05:14:35.146767258-05:00 AUDIT: id="961aa3d3-290b-4237-916c-e1631bf53ecb" response="200" E0125 05:14:35.176174 4678 watcher.go:186] watch chan error: etcdserver: mvcc: required revision has been compacted W0125 05:14:35.176508 4678 reflector.go:319] github.com/openshift/origin/pkg/unidling/controller/controller.go:195: watch of *api.Event ended with: etcdserver: mvcc: required revision has been compacted I0125 05:14:35.176518 4678 panics.go:76] GET /api/v1/watch/events?fieldSelector=reason%3DNeedPods&resourceVersion=10191&timeoutSeconds=532: (31.898339ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:unidling-controller] 172.18.7.222:50846] I0125 05:14:35.309714 4678 audit.go:125] 2017-01-25T05:14:35.309681485-05:00 AUDIT: id="6e066e46-99da-48b4-9516-e68122cce66d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:14:35.310692 4678 audit.go:45] 2017-01-25T05:14:35.310678333-05:00 AUDIT: id="6e066e46-99da-48b4-9516-e68122cce66d" response="200" I0125 05:14:35.310772 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.316395ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:35.684614 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:36.068235 4678 audit.go:125] 2017-01-25T05:14:36.068185662-05:00 AUDIT: id="aaae3b7f-2301-41a0-9be6-1f9aa437b310" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:36.069536 4678 audit.go:45] 2017-01-25T05:14:36.069524885-05:00 AUDIT: id="aaae3b7f-2301-41a0-9be6-1f9aa437b310" response="200" I0125 05:14:36.069604 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.018702ms) 200 
[[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:36.176728 4678 reflector.go:234] Listing and watching *api.Event from github.com/openshift/origin/pkg/unidling/controller/controller.go:195 I0125 05:14:36.179191 4678 audit.go:125] 2017-01-25T05:14:36.179135949-05:00 AUDIT: id="35d92fda-2aec-4758-81cc-0617a6c0a9b3" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:unidling-controller" as="" asgroups="" namespace="" uri="/api/v1/events?fieldSelector=reason%3DNeedPods&resourceVersion=0" I0125 05:14:36.182210 4678 audit.go:45] 2017-01-25T05:14:36.182186587-05:00 AUDIT: id="35d92fda-2aec-4758-81cc-0617a6c0a9b3" response="200" I0125 05:14:36.182277 4678 panics.go:76] GET /api/v1/events?fieldSelector=reason%3DNeedPods&resourceVersion=0: (5.01584ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:unidling-controller] 172.18.7.222:50846] I0125 05:14:36.184302 4678 audit.go:125] 2017-01-25T05:14:36.184259059-05:00 AUDIT: id="5a4e0a4e-8561-4080-a7ab-385d138f06a2" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:unidling-controller" as="" asgroups="" namespace="" uri="/api/v1/watch/events?fieldSelector=reason%3DNeedPods&resourceVersion=11189&timeoutSeconds=398" I0125 05:14:36.184638 4678 audit.go:45] 2017-01-25T05:14:36.184625363-05:00 AUDIT: id="5a4e0a4e-8561-4080-a7ab-385d138f06a2" response="200" I0125 05:14:36.797021 4678 worker.go:45] 0 Health Check Listeners I0125 05:14:36.797050 4678 worker.go:46] 4 Services registered for health checking I0125 05:14:36.797057 4678 worker.go:50] Service extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper has 1 local endpoints I0125 05:14:36.797063 4678 worker.go:50] Service extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master has 1 local endpoints I0125 05:14:36.797067 4678 worker.go:50] Service default/docker-registry has 1 local endpoints I0125 05:14:36.797071 4678 worker.go:50] Service default/router has 1 local endpoints I0125 05:14:37.068159 4678 audit.go:125] 2017-01-25T05:14:37.068121021-05:00 AUDIT: id="54a52f5e-8659-40d2-9957-ce26d1cc0837" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:37.069565 4678 audit.go:45] 2017-01-25T05:14:37.069553937-05:00 AUDIT: id="54a52f5e-8659-40d2-9957-ce26d1cc0837" response="200" I0125 05:14:37.069639 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.060669ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:37.072627 4678 proxier.go:804] Syncing iptables rules I0125 05:14:37.072640 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:14:37.091635 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:14:37.110102 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:37.128438 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:14:37.147206 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment 
kubernetes service portals -j KUBE-SERVICES] I0125 05:14:37.166136 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:14:37.184717 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:14:37.203222 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:14:37.221869 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:14:37.241486 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment 
--comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:14:37.241525 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:14:37.262214 4678 proxier.go:797] syncProxyRules took 189.567581ms I0125 05:14:37.262259 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-CONTAINER -t nat] I0125 05:14:37.280780 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment handle ClusterIPs; NOTE: this 
must be before the NodePort rules -j KUBE-PORTALS-CONTAINER] I0125 05:14:37.299499 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-HOST -t nat] I0125 05:14:37.317825 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-HOST] I0125 05:14:37.336424 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-CONTAINER -t nat] I0125 05:14:37.357479 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-CONTAINER] I0125 05:14:37.377495 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-HOST -t nat] I0125 05:14:37.396178 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-HOST] I0125 05:14:37.415504 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-NON-LOCAL -t filter] I0125 05:14:37.434039 4678 iptables.go:362] running iptables -C [INPUT -t filter -m comment --comment Ensure that non-local NodePort traffic can flow -j KUBE-NODEPORT-NON-LOCAL] I0125 05:14:37.611735 4678 kubelet.go:1155] Image garbage collection succeeded I0125 05:14:37.684602 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:37.957617 4678 oom_linux.go:64] attempting to set "/proc/2279/oom_score_adj" to "-999" I0125 05:14:37.957799 4678 oom_linux.go:64] attempting to set "/proc/2294/oom_score_adj" to "-999" I0125 05:14:37.957883 4678 container_manager_linux.go:434] Discovered runtime cgroups name: /system.slice/docker.service I0125 05:14:37.957902 4678 oom_linux.go:64] attempting to set "/proc/4678/oom_score_adj" to "-999" W0125 05:14:37.957944 4678 container_manager_linux.go:728] CPUAccounting not enabled for pid: 4678 W0125 05:14:37.957959 4678 container_manager_linux.go:731] MemoryAccounting not enabled for pid: 4678 I0125 05:14:38.068442 4678 audit.go:125] 2017-01-25T05:14:38.06840398-05:00 AUDIT: id="fa0ca3b2-59e8-47fd-98db-742ecfab95b1" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:38.069890 4678 audit.go:45] 2017-01-25T05:14:38.069878102-05:00 AUDIT: id="fa0ca3b2-59e8-47fd-98db-742ecfab95b1" response="200" I0125 05:14:38.069979 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.374879ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:38.145065 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b76687cc\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:14:38.145088 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:14:38.145101 4678 factory.go:108] Factory "systemd" can handle container 
"/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. I0125 05:14:38.145113 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:14:38.145144 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-04c98b55\x2de2da\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-server\x2dcertificate.mount: invalid container name I0125 05:14:38.145148 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount" I0125 05:14:38.145155 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount", but ignoring. I0125 05:14:38.145167 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount" I0125 05:14:38.145178 4678 factory.go:104] Error trying to work out if we can handle /system.slice/sys-kernel-debug.mount: invalid container name I0125 05:14:38.145181 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/sys-kernel-debug.mount" I0125 05:14:38.145186 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/sys-kernel-debug.mount", but ignoring. I0125 05:14:38.145192 4678 manager.go:867] ignoring container "/system.slice/sys-kernel-debug.mount" I0125 05:14:38.145210 4678 factory.go:104] Error trying to work out if we can handle /system.slice/dev-hugepages.mount: invalid container name I0125 05:14:38.145214 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/dev-hugepages.mount" I0125 05:14:38.145218 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/dev-hugepages.mount", but ignoring. I0125 05:14:38.145224 4678 manager.go:867] ignoring container "/system.slice/dev-hugepages.mount" I0125 05:14:38.145257 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-daedc0da\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:14:38.145260 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:14:38.145270 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. 
I0125 05:14:38.145279 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:14:38.145291 4678 factory.go:104] Error trying to work out if we can handle /system.slice/-.mount: invalid container name I0125 05:14:38.145294 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/-.mount" I0125 05:14:38.145298 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/-.mount", but ignoring. I0125 05:14:38.145303 4678 manager.go:867] ignoring container "/system.slice/-.mount" I0125 05:14:38.145309 4678 factory.go:104] Error trying to work out if we can handle /system.slice/dev-mqueue.mount: invalid container name I0125 05:14:38.145312 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/dev-mqueue.mount" I0125 05:14:38.145316 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/dev-mqueue.mount", but ignoring. I0125 05:14:38.145321 4678 manager.go:867] ignoring container "/system.slice/dev-mqueue.mount" I0125 05:14:38.145347 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-e932e61a\x2de2d9\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-registry\x2dtoken\x2dvjbst.mount: invalid container name I0125 05:14:38.145350 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount" I0125 05:14:38.145357 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount", but ignoring. I0125 05:14:38.145366 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount" I0125 05:14:38.145379 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir.mount: invalid container name I0125 05:14:38.145382 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount" I0125 05:14:38.145386 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount", but ignoring. I0125 05:14:38.145392 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount" I0125 05:14:38.145399 4678 factory.go:104] Error trying to work out if we can handle /system.slice/run-user-1000.mount: invalid container name I0125 05:14:38.145402 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/run-user-1000.mount" I0125 05:14:38.145406 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/run-user-1000.mount", but ignoring. 
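The mount-unit cgroup names that the docker and systemd factories argue over above use systemd's \xNN escaping, which makes the underlying paths hard to read. A minimal, hypothetical decoder (not cAdvisor's or systemd's own code, just the escaping rule applied in reverse) shows what they resolve to:

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// unescapeSystemdUnit reverses systemd's \xNN escaping in unit names, so a
// cgroup path such as
//   /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir.mount
// reads as /system.slice/mnt-openshift-xfs-vol-dir.mount.
func unescapeSystemdUnit(name string) string {
	re := regexp.MustCompile(`\\x([0-9a-fA-F]{2})`)
	return re.ReplaceAllStringFunc(name, func(m string) string {
		b, err := strconv.ParseUint(m[2:], 16, 8)
		if err != nil {
			return m // leave malformed escapes untouched
		}
		return string(rune(b))
	})
}

func main() {
	// One of the unit names from the factory messages above.
	fmt.Println(unescapeSystemdUnit(`/system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b76687cc\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount`))
}
```

Decoded, these units are the per-pod secret token volume mounts under /mnt/openshift-xfs-vol-dir/pods/&lt;pod-uid&gt;/volumes/kubernetes.io~secret/, which is why the docker factory reports "invalid container name" and the systemd factory recognizes them as .mount units but deliberately ignores them.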
I0125 05:14:38.145414 4678 manager.go:867] ignoring container "/system.slice/run-user-1000.mount" I0125 05:14:38.145439 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-04c98b55\x2de2da\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-router\x2dtoken\x2ds79l8.mount: invalid container name I0125 05:14:38.145442 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount" I0125 05:14:38.145450 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount", but ignoring. I0125 05:14:38.145458 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount" I0125 05:14:38.145469 4678 factory.go:104] Error trying to work out if we can handle /system.slice/sys-kernel-config.mount: invalid container name I0125 05:14:38.145472 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/sys-kernel-config.mount" I0125 05:14:38.145476 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/sys-kernel-config.mount", but ignoring. I0125 05:14:38.145482 4678 manager.go:867] ignoring container "/system.slice/sys-kernel-config.mount" I0125 05:14:38.203552 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:14:38.203574 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:14:38.247924 4678 audit.go:125] 2017-01-25T05:14:38.247872658-05:00 AUDIT: id="96685e7b-c147-440a-a4fa-d2f4055fefae" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:14:38.248447 4678 audit.go:45] 2017-01-25T05:14:38.248432766-05:00 AUDIT: id="96685e7b-c147-440a-a4fa-d2f4055fefae" response="200" I0125 05:14:38.248820 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.251312ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:38.347984 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:14:38.354583 4678 audit.go:125] 2017-01-25T05:14:38.354550765-05:00 AUDIT: id="610db6c4-d8b8-4b17-bf6d-97839779af2d" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:14:38.356730 4678 audit.go:45] 2017-01-25T05:14:38.356715719-05:00 AUDIT: id="610db6c4-d8b8-4b17-bf6d-97839779af2d" response="200" I0125 05:14:38.357032 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.687937ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:38.357964 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:14:38.391888 4678 audit.go:125] 2017-01-25T05:14:38.391855893-05:00 AUDIT: id="8276142b-bbd2-4abf-b667-b4542fabc182" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:38.392305 4678 audit.go:45] 2017-01-25T05:14:38.392295774-05:00 AUDIT: id="8276142b-bbd2-4abf-b667-b4542fabc182" response="200" I0125 05:14:38.392627 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.013121ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:38.392991 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
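The KUBE-SVC-*/KUBE-SEP-* chain names in the iptables-restore payloads earlier in this log (for example KUBE-SVC-NPX46M4PTMTKRN6Y for the default/kubernetes:https cluster IP) are not random: kube-proxy derives them by hashing the service-port name plus the lower-cased protocol and keeping the first 16 base32 characters. A small sketch of that derivation against the standard library, rather than the vendored proxier code, assuming the hashing scheme of this Kubernetes vintage:

```go
package main

import (
	"crypto/sha256"
	"encoding/base32"
	"fmt"
)

// chainName mirrors how the iptables proxier names its per-service chains:
// SHA-256 of "<namespace>/<service>:<port-name>" plus the lower-cased
// protocol, base32-encoded and truncated to 16 characters.
func chainName(prefix, servicePortName, protocol string) string {
	hash := sha256.Sum256([]byte(servicePortName + protocol))
	encoded := base32.StdEncoding.EncodeToString(hash[:])
	return prefix + encoded[:16]
}

func main() {
	// Should reproduce the KUBE-SVC-NPX46M4PTMTKRN6Y chain seen in the
	// restore payloads above for the default/kubernetes:https cluster IP.
	fmt.Println(chainName("KUBE-SVC-", "default/kubernetes:https", "tcp"))
}
```

The per-endpoint KUBE-SEP-* chains come from the same scheme with the endpoint string folded into the hash, which is why the chain names repeat verbatim across the successive restores above even though the rule ordering differs between them.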
I0125 05:14:38.502627 4678 namespace_controller.go:206] Finished syncing namespace "openshift" (503ns) I0125 05:14:38.502664 4678 namespace_controller.go:206] Finished syncing namespace "default" (191ns) I0125 05:14:38.502671 4678 namespace_controller.go:206] Finished syncing namespace "kube-system" (187ns) I0125 05:14:38.502678 4678 namespace_controller.go:206] Finished syncing namespace "openshift-infra" (169ns) I0125 05:14:38.502685 4678 namespace_controller.go:206] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (185ns) I0125 05:14:38.502693 4678 namespace_controller.go:206] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (174ns) I0125 05:14:38.502700 4678 namespace_controller.go:206] Finished syncing namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (187ns) I0125 05:14:38.550993 4678 panics.go:76] GET /oapi/v1/watch/buildconfigs?resourceVersion=10090&timeoutSeconds=551: (9m11.001154087s) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:14:38.551234 4678 reflector.go:392] github.com/openshift/origin/pkg/build/controller/factory/factory.go:352: Watch close - *api.BuildConfig total 4 items received I0125 05:14:38.551851 4678 audit.go:125] 2017-01-25T05:14:38.551815497-05:00 AUDIT: id="cdf94c02-5d0b-442c-8363-8b9087749a03" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/watch/buildconfigs?resourceVersion=10733&timeoutSeconds=531" I0125 05:14:38.552373 4678 audit.go:45] 2017-01-25T05:14:38.552363009-05:00 AUDIT: id="cdf94c02-5d0b-442c-8363-8b9087749a03" response="200" I0125 05:14:39.068192 4678 audit.go:125] 2017-01-25T05:14:39.068155651-05:00 AUDIT: id="62f5d3db-0214-40ff-988f-1d64155cb92b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:39.069511 4678 audit.go:45] 2017-01-25T05:14:39.069500485-05:00 AUDIT: id="62f5d3db-0214-40ff-988f-1d64155cb92b" response="200" I0125 05:14:39.069593 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.952172ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:39.261006 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:39.261041 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:39.261728 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:39.261744 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:39.262317 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42c36bb00 -1 [] true false map[] 0xc42ee97590 } I0125 05:14:39.262363 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:39.262317 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42c2ee340 -1 [] true false map[] 0xc42fc89860 } I0125 05:14:39.262386 4678 prober.go:113] Readiness probe for 
"router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:39.272395 4678 audit.go:125] 2017-01-25T05:14:39.272360297-05:00 AUDIT: id="21540bcd-7577-476b-ba58-b82348bcbe3c" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:14:39.273302 4678 audit.go:45] 2017-01-25T05:14:39.273287647-05:00 AUDIT: id="21540bcd-7577-476b-ba58-b82348bcbe3c" response="200" I0125 05:14:39.273380 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.934065ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:39.273649 4678 controller.go:106] Found 0 cronjobs I0125 05:14:39.275539 4678 audit.go:125] 2017-01-25T05:14:39.275500873-05:00 AUDIT: id="df21f59d-302d-49c5-ac9a-5d752a39b502" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:14:39.276384 4678 audit.go:45] 2017-01-25T05:14:39.27637406-05:00 AUDIT: id="df21f59d-302d-49c5-ac9a-5d752a39b502" response="200" I0125 05:14:39.276436 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.553256ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:39.276594 4678 controller.go:114] Found 0 jobs I0125 05:14:39.276601 4678 controller.go:117] Found 0 groups I0125 05:14:39.684630 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:40.068872 4678 audit.go:125] 2017-01-25T05:14:40.068810728-05:00 AUDIT: id="3ea38049-023b-40e8-bbb2-99d625c0c82b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:40.070677 4678 audit.go:45] 2017-01-25T05:14:40.070655277-05:00 AUDIT: id="3ea38049-023b-40e8-bbb2-99d625c0c82b" response="200" I0125 05:14:40.070784 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (4.046321ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:41.068283 4678 audit.go:125] 2017-01-25T05:14:41.068244597-05:00 AUDIT: id="e3bb7297-af34-4d6d-aee0-349a3e09ed55" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:41.069674 4678 audit.go:45] 2017-01-25T05:14:41.069661898-05:00 AUDIT: id="e3bb7297-af34-4d6d-aee0-349a3e09ed55" response="200" I0125 05:14:41.069749 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.201345ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:41.684597 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:41.953518 4678 audit.go:125] 2017-01-25T05:14:41.953475836-05:00 AUDIT: id="960d66ad-5edf-4e73-bb4f-6a563bcaf706" ip="172.18.7.222" method="GET" user="system:openshift-master" 
as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:14:41.954625 4678 audit.go:45] 2017-01-25T05:14:41.954615126-05:00 AUDIT: id="960d66ad-5edf-4e73-bb4f-6a563bcaf706" response="200" I0125 05:14:41.954705 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.463839ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:42.068211 4678 audit.go:125] 2017-01-25T05:14:42.068158279-05:00 AUDIT: id="c123696b-5ba3-460b-8011-20c4cbbf2115" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:42.069512 4678 audit.go:45] 2017-01-25T05:14:42.069500508-05:00 AUDIT: id="c123696b-5ba3-460b-8011-20c4cbbf2115" response="200" I0125 05:14:42.069581 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.011664ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:42.485815 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:14:42.486231 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:42.498881 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:42.549725 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:14:42.549749 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:42.904478 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:14:42.904872 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:42.905441 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:42.956264 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:14:42.956289 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:42.967211 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:42.967237 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:42.967871 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:42.967888 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:42.968271 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:42 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42b4b9f20 0 [] true false map[] 0xc43601d680 } I0125 05:14:42.968332 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:42.968868 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:42 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42b7bee60 0 [] true false map[] 0xc43227bd10 } I0125 05:14:42.968903 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:43.068265 4678 audit.go:125] 2017-01-25T05:14:43.068218589-05:00 AUDIT: id="a6022e38-6014-4b4a-8472-6b037971d919" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:43.069587 4678 audit.go:45] 2017-01-25T05:14:43.069576233-05:00 AUDIT: id="a6022e38-6014-4b4a-8472-6b037971d919" response="200" I0125 05:14:43.069668 4678 panics.go:76] 
GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.038534ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:43.393917 4678 audit.go:125] 2017-01-25T05:14:43.393880675-05:00 AUDIT: id="ab5c6c5c-95a4-46b7-9469-9219ea677fe7" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:43.394354 4678 audit.go:45] 2017-01-25T05:14:43.39434261-05:00 AUDIT: id="ab5c6c5c-95a4-46b7-9469-9219ea677fe7" response="200" I0125 05:14:43.394692 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.017759ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:43.684661 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:44.068190 4678 audit.go:125] 2017-01-25T05:14:44.068149799-05:00 AUDIT: id="08156183-2a46-48ee-b7d7-0eba82373acc" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:44.069513 4678 audit.go:45] 2017-01-25T05:14:44.069501428-05:00 AUDIT: id="08156183-2a46-48ee-b7d7-0eba82373acc" response="200" I0125 05:14:44.069581 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.013066ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:45.068063 4678 audit.go:125] 2017-01-25T05:14:45.068019742-05:00 AUDIT: id="9e066388-db33-45f2-b955-2805752fb8b1" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:45.069398 4678 audit.go:45] 2017-01-25T05:14:45.069385855-05:00 AUDIT: id="9e066388-db33-45f2-b955-2805752fb8b1" response="200" I0125 05:14:45.069470 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.887559ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:45.315591 4678 audit.go:125] 2017-01-25T05:14:45.315553961-05:00 AUDIT: id="1d8ed47d-4521-4138-9736-6a87c92655c5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:14:45.316593 4678 audit.go:45] 2017-01-25T05:14:45.3165782-05:00 AUDIT: id="1d8ed47d-4521-4138-9736-6a87c92655c5" response="200" I0125 05:14:45.316664 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.316382ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:45.684602 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:46.068212 4678 audit.go:125] 2017-01-25T05:14:46.068151876-05:00 AUDIT: id="7f19a1d3-5723-440e-8273-8b34b78a7eec" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:46.069529 4678 audit.go:45] 2017-01-25T05:14:46.069517902-05:00 AUDIT: id="7f19a1d3-5723-440e-8273-8b34b78a7eec" response="200" I0125 05:14:46.069603 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.015273ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:47.068312 4678 audit.go:125] 2017-01-25T05:14:47.068266945-05:00 AUDIT: id="b96791e8-bfe9-4bb0-9b1c-03250ed958fc" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:47.069664 4678 audit.go:45] 2017-01-25T05:14:47.069652251-05:00 AUDIT: id="b96791e8-bfe9-4bb0-9b1c-03250ed958fc" response="200" I0125 05:14:47.069732 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.145555ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:47.684672 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:48.068256 4678 audit.go:125] 2017-01-25T05:14:48.068210844-05:00 AUDIT: id="a18e922a-584b-4599-89e4-8220b437bc29" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:48.069485 4678 audit.go:45] 2017-01-25T05:14:48.069474476-05:00 AUDIT: id="a18e922a-584b-4599-89e4-8220b437bc29" response="200" I0125 05:14:48.069566 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.981919ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:48.358666 4678 audit.go:125] 2017-01-25T05:14:48.358619612-05:00 AUDIT: id="91a02657-aa7e-4adc-b952-dfeac9cbcaf5" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:14:48.359229 4678 audit.go:45] 2017-01-25T05:14:48.359213984-05:00 AUDIT: id="91a02657-aa7e-4adc-b952-dfeac9cbcaf5" response="200" I0125 05:14:48.359614 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.281747ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:48.383433 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:14:48.383457 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:14:48.399154 4678 audit.go:125] 2017-01-25T05:14:48.399112162-05:00 AUDIT: id="d8e9fc91-aa83-4281-9180-8001f7f42652" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:48.399652 4678 audit.go:45] 2017-01-25T05:14:48.39963881-05:00 AUDIT: id="d8e9fc91-aa83-4281-9180-8001f7f42652" response="200" I0125 05:14:48.399998 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.181107ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:48.481094 4678 audit.go:125] 2017-01-25T05:14:48.481051276-05:00 AUDIT: id="16555e30-fd77-49b5-a4b9-abc6ec1a6c52" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:14:48.484293 4678 audit.go:45] 2017-01-25T05:14:48.484277185-05:00 AUDIT: id="16555e30-fd77-49b5-a4b9-abc6ec1a6c52" response="200" I0125 05:14:48.485226 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (4.44172ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:48.485649 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:14:48.503361 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:14:48.684613 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094) I0125 05:14:48.684665 4678 kubelet_pods.go:1029] Generating status for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:48.684849 4678 status_manager.go:312] Ignoring same status for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:22 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:42 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:22 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.3 StartTime:2017-01-25 03:40:22 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:registry State:{Waiting: Running:0xc427d97dc0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-docker-registry:86a9783 ImageID:docker://sha256:3ec55bd72e2d99d049485e7f0556140392c415053ffba63b99bdeca83d4e5b7f ContainerID:docker://b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b}]} I0125 05:14:48.684955 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:48.699075 4678 secret.go:179] Setting up volume registry-token-vjbst for pod e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/registry-token-vjbst I0125 05:14:48.699781 4678 audit.go:125] 2017-01-25T05:14:48.69974679-05:00 AUDIT: id="b53f7e77-c5ab-4166-8ba9-774934aaee3a" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/registry-token-vjbst" I0125 05:14:48.700965 4678 audit.go:45] 2017-01-25T05:14:48.700953258-05:00 
AUDIT: id="b53f7e77-c5ab-4166-8ba9-774934aaee3a" response="200" I0125 05:14:48.701149 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/registry-token-vjbst: (1.606258ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:48.701348 4678 secret.go:206] Received secret default/registry-token-vjbst containing (4) pieces of data, 4113 total bytes I0125 05:14:48.701702 4678 atomic_writer.go:142] pod default/docker-registry-1-xppm3 volume registry-token-vjbst: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/registry-token-vjbst I0125 05:14:48.701798 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094-registry-token-vjbst" (spec.Name: "registry-token-vjbst") pod "e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094" (UID: "e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094"). I0125 05:14:48.985201 4678 volume_manager.go:365] All volumes are attached and mounted for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:48.985872 4678 audit.go:125] 2017-01-25T05:14:48.985831704-05:00 AUDIT: id="c6b1f129-47dc-4e37-88cf-997cc5911266" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/registry-dockercfg-k7v0c" I0125 05:14:48.987027 4678 audit.go:45] 2017-01-25T05:14:48.987016255-05:00 AUDIT: id="c6b1f129-47dc-4e37-88cf-997cc5911266" response="200" I0125 05:14:48.987224 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/registry-dockercfg-k7v0c: (1.59703ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:48.987443 4678 docker_manager.go:1938] Found pod infra container for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:48.987517 4678 docker_manager.go:1951] Pod infra container looks good, keep it "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:14:48.987532 4678 docker_manager.go:1999] pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" container "registry" exists as b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b I0125 05:14:48.987662 4678 docker_manager.go:2086] Got container changes for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:f3e8c4070605ec0a325ddc790afad13d1f5e410da6899802c43cad4aaaabef59 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[f3e8c4070605ec0a325ddc790afad13d1f5e410da6899802c43cad4aaaabef59:-1 b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b:0]} I0125 05:14:49.068181 4678 audit.go:125] 2017-01-25T05:14:49.068137578-05:00 AUDIT: id="f36abbe4-1597-4ac1-8d12-b8ed9308f5c6" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:49.069551 4678 audit.go:45] 2017-01-25T05:14:49.069538374-05:00 AUDIT: id="f36abbe4-1597-4ac1-8d12-b8ed9308f5c6" response="200" I0125 05:14:49.069626 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.03668ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:49.133759 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:14:49.133824 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:14:49.133858 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:49.133886 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:49.133897 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:14:49.133906 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:49.133917 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:49.133977 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:49.133983 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:14:49.133988 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:14:49.133992 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:14:49.134020 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:14:49.134026 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:14:49.134069 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:14:49.134078 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:14:49.134097 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status 
after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:49.134099 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:14:49.134112 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:49.134118 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:14:49.134161 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:14:49.134181 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:14:49.134192 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:14:49.134205 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:14:49.134214 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:14:49.134218 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:14:49.134239 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:14:49.261000 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:49.261028 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:49.261732 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:49.261751 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:49.262127 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc427992100 -1 [] true false map[] 0xc43544c870 } I0125 05:14:49.262162 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:49.262497 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42b8beb40 -1 [] true false map[] 0xc4318d1770 } I0125 05:14:49.262524 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:49.279192 4678 audit.go:125] 2017-01-25T05:14:49.279156849-05:00 AUDIT: id="912ac9b0-6170-434f-ac51-92c5e12da968" ip="172.18.7.222" method="GET" 
user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:14:49.280091 4678 audit.go:45] 2017-01-25T05:14:49.280075111-05:00 AUDIT: id="912ac9b0-6170-434f-ac51-92c5e12da968" response="200" I0125 05:14:49.280161 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.924258ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:49.280445 4678 controller.go:106] Found 0 cronjobs I0125 05:14:49.282408 4678 audit.go:125] 2017-01-25T05:14:49.282373239-05:00 AUDIT: id="cd6159cf-5253-4a9a-a874-7b2f8cf4efda" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:14:49.283258 4678 audit.go:45] 2017-01-25T05:14:49.283248281-05:00 AUDIT: id="cd6159cf-5253-4a9a-a874-7b2f8cf4efda" response="200" I0125 05:14:49.283310 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.62505ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:49.283498 4678 controller.go:114] Found 0 jobs I0125 05:14:49.283506 4678 controller.go:117] Found 0 groups I0125 05:14:49.684610 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:50.068110 4678 audit.go:125] 2017-01-25T05:14:50.068065791-05:00 AUDIT: id="a6a4d3db-25e5-436d-a24b-50533683002e" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:50.069493 4678 audit.go:45] 2017-01-25T05:14:50.069478841-05:00 AUDIT: id="a6a4d3db-25e5-436d-a24b-50533683002e" response="200" I0125 05:14:50.069575 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.987613ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:50.325832 4678 panics.go:76] GET /api/v1/watch/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=10205&timeoutSeconds=413: (6m53.001084458s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:50.326074 4678 reflector.go:392] pkg/kubelet/kubelet.go:386: Watch close - *api.Node total 41 items received I0125 05:14:50.326729 4678 audit.go:125] 2017-01-25T05:14:50.326692551-05:00 AUDIT: id="ef275ffc-588d-4cf0-b230-d65a91fb081f" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/watch/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=11192&timeoutSeconds=318" I0125 05:14:50.327142 4678 audit.go:45] 2017-01-25T05:14:50.32712986-05:00 AUDIT: id="ef275ffc-588d-4cf0-b230-d65a91fb081f" response="200" I0125 05:14:50.641452 4678 panics.go:76] GET /oapi/v1/watch/deploymentconfigs?resourceVersion=10490&timeoutSeconds=317: (5m17.000955686s) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:14:50.641697 4678 reflector.go:392] github.com/openshift/origin/pkg/controller/shared/shared_informer.go:89: Watch close - *api.DeploymentConfig total 63 items received I0125 05:14:50.642338 4678 audit.go:125] 2017-01-25T05:14:50.642306972-05:00 AUDIT: 
id="954ecccd-f8b0-4f12-9d0f-d54201e140cb" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/watch/deploymentconfigs?resourceVersion=11165&timeoutSeconds=364" I0125 05:14:50.642809 4678 audit.go:45] 2017-01-25T05:14:50.642798741-05:00 AUDIT: id="954ecccd-f8b0-4f12-9d0f-d54201e140cb" response="200" I0125 05:14:51.068320 4678 audit.go:125] 2017-01-25T05:14:51.068271871-05:00 AUDIT: id="a954c54e-692a-4553-b236-dcc3a52e994f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:51.069585 4678 audit.go:45] 2017-01-25T05:14:51.069574291-05:00 AUDIT: id="a954c54e-692a-4553-b236-dcc3a52e994f" response="200" I0125 05:14:51.069655 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.085013ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:51.684605 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:51.955792 4678 audit.go:125] 2017-01-25T05:14:51.955757879-05:00 AUDIT: id="f328d031-8a28-46d6-9e28-6144eb4a012a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:14:51.956815 4678 audit.go:45] 2017-01-25T05:14:51.956804752-05:00 AUDIT: id="f328d031-8a28-46d6-9e28-6144eb4a012a" response="200" I0125 05:14:51.956890 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.337093ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:52.068259 4678 audit.go:125] 2017-01-25T05:14:52.068213946-05:00 AUDIT: id="ceaf514b-68ba-41a6-8426-015922b05de8" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:52.069538 4678 audit.go:45] 2017-01-25T05:14:52.069526926-05:00 AUDIT: id="ceaf514b-68ba-41a6-8426-015922b05de8" response="200" I0125 05:14:52.069611 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.062069ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:52.485833 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:14:52.486246 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:52.498911 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true 
/var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:52.549433 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:14:52.549457 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:14:52.904446 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:14:52.904830 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:52.905425 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:14:52.955346 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:14:52.955372 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:14:52.967151 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:52.967177 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:52.967908 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:14:52.967925 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:52.968104 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:52 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc426a914e0 0 [] true false map[] 0xc4314f44b0 } I0125 05:14:52.968178 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:52.968894 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:14:52 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc4334bdfc0 0 [] true false map[] 0xc4315bfc20 } I0125 05:14:52.968925 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:14:53.068244 4678 audit.go:125] 2017-01-25T05:14:53.068178974-05:00 AUDIT: id="0870e363-15c3-4f0e-a43c-1ae7cac62ce8" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:53.069560 4678 audit.go:45] 2017-01-25T05:14:53.069545461-05:00 AUDIT: id="0870e363-15c3-4f0e-a43c-1ae7cac62ce8" response="200" I0125 05:14:53.069637 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.042673ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:53.401317 4678 audit.go:125] 2017-01-25T05:14:53.401276894-05:00 AUDIT: id="de2b54d1-1d59-4e74-879e-e89e6d9bcfca" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:53.401715 4678 audit.go:45] 2017-01-25T05:14:53.401706198-05:00 AUDIT: id="de2b54d1-1d59-4e74-879e-e89e6d9bcfca" response="200" I0125 05:14:53.402041 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (987.407µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:53.402284 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:14:53.684619 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:54.009350 4678 gc_controller.go:175] GC'ing orphaned I0125 05:14:54.009372 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. 
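The probe records above show two recurring checks: an exec readiness probe that runs `psql ... -c 'SELECT 1'` inside the postgresql container, and HTTP liveness/readiness probes against a /healthz endpoint. (The "cannot set terminal process group" noise in the exec probe response comes from the `-i` flag on /bin/sh with no attached terminal.) A minimal Go sketch that reproduces both checks outside the kubelet follows; it assumes psql is on PATH, POSTGRESQL_USER and POSTGRESQL_DATABASE are set in the environment, and uses the router's localhost:1936/healthz URL as the HTTP example. It is not the kubelet's prober implementation.

```go
// Illustrative sketch of the two probe styles logged above: an exec probe
// running SELECT 1 and an HTTP probe against /healthz.
package main

import (
	"fmt"
	"net/http"
	"os/exec"
	"time"
)

// execProbe runs the same SELECT 1 query the readiness probe uses
// (without the -i flag that produces the job-control warnings).
func execProbe() error {
	cmd := exec.Command("/bin/sh", "-c",
		"psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'")
	out, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("exec probe failed: %v: %s", err, out)
	}
	return nil
}

// httpProbe treats any status below 400 as healthy, roughly what the
// kubelet's HTTP prober accepts.
func httpProbe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy: %s", resp.Status)
	}
	return nil
}

func main() {
	fmt.Println("exec probe:", execProbe())
	fmt.Println("http probe:", httpProbe("http://localhost:1936/healthz"))
}
```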
I0125 05:14:54.068355 4678 audit.go:125] 2017-01-25T05:14:54.068303598-05:00 AUDIT: id="79420e68-aca2-47aa-8ae8-4874cd669287" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:54.069812 4678 audit.go:45] 2017-01-25T05:14:54.069799074-05:00 AUDIT: id="79420e68-aca2-47aa-8ae8-4874cd669287" response="200" I0125 05:14:54.069896 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.280101ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:54.648831 4678 panics.go:76] GET /api/v1/watch/pods?resourceVersion=10165&timeoutSeconds=463: (7m43.001087163s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:54.649080 4678 reflector.go:392] pkg/controller/informers/factory.go:89: Watch close - *api.Pod total 155 items received I0125 05:14:54.649689 4678 audit.go:125] 2017-01-25T05:14:54.649660242-05:00 AUDIT: id="7596d4ce-1d7a-4de3-b12c-f1d557e74a74" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/pods?resourceVersion=11167&timeoutSeconds=399" I0125 05:14:54.650137 4678 audit.go:45] 2017-01-25T05:14:54.650127828-05:00 AUDIT: id="7596d4ce-1d7a-4de3-b12c-f1d557e74a74" response="200" I0125 05:14:55.068096 4678 audit.go:125] 2017-01-25T05:14:55.068051234-05:00 AUDIT: id="33ec60fa-2c53-4ec9-863e-9900d80a64d0" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:55.069416 4678 audit.go:45] 2017-01-25T05:14:55.069403229-05:00 AUDIT: id="33ec60fa-2c53-4ec9-863e-9900d80a64d0" response="200" I0125 05:14:55.069496 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.936135ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:55.321817 4678 audit.go:125] 2017-01-25T05:14:55.321779387-05:00 AUDIT: id="8008212c-369b-4d80-a036-efdb8d0ca0e1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:14:55.322782 4678 audit.go:45] 2017-01-25T05:14:55.322756266-05:00 AUDIT: id="8008212c-369b-4d80-a036-efdb8d0ca0e1" response="200" I0125 05:14:55.322843 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.258859ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:55.684622 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:56.068203 4678 audit.go:125] 2017-01-25T05:14:56.068157791-05:00 AUDIT: id="9e275625-c868-4eaf-9ca8-3b82b476bfdc" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 
05:14:56.069498 4678 audit.go:45] 2017-01-25T05:14:56.069486098-05:00 AUDIT: id="9e275625-c868-4eaf-9ca8-3b82b476bfdc" response="200" I0125 05:14:56.069577 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.980199ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:57.068183 4678 audit.go:125] 2017-01-25T05:14:57.068143702-05:00 AUDIT: id="7b8aebd9-c7e3-46fc-b881-e57d743a822a" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:57.069494 4678 audit.go:45] 2017-01-25T05:14:57.069480339-05:00 AUDIT: id="7b8aebd9-c7e3-46fc-b881-e57d743a822a" response="200" I0125 05:14:57.069566 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.990386ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:57.612061 4678 panics.go:76] GET /api/v1/watch/configmaps?resourceVersion=4&timeoutSeconds=562: (9m22.001170421s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:57.612341 4678 reflector.go:392] pkg/controller/resourcequota/resource_quota_controller.go:232: Watch close - *api.ConfigMap total 0 items received I0125 05:14:57.612941 4678 audit.go:125] 2017-01-25T05:14:57.612908903-05:00 AUDIT: id="ce53247b-6e3f-438c-a173-d772da13bc91" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/configmaps?resourceVersion=4&timeoutSeconds=318" I0125 05:14:57.613383 4678 audit.go:45] 2017-01-25T05:14:57.613373147-05:00 AUDIT: id="ce53247b-6e3f-438c-a173-d772da13bc91" response="200" I0125 05:14:57.684679 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:14:58.068109 4678 audit.go:125] 2017-01-25T05:14:58.068070801-05:00 AUDIT: id="1dd97bf4-6efc-4514-9c65-aa3bc82f1db4" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:58.069436 4678 audit.go:45] 2017-01-25T05:14:58.069424738-05:00 AUDIT: id="1dd97bf4-6efc-4514-9c65-aa3bc82f1db4" response="200" I0125 05:14:58.069506 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.911363ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:58.403161 4678 audit.go:125] 2017-01-25T05:14:58.403129113-05:00 AUDIT: id="b4f89023-7031-46c4-8bf6-79a736a0b7d4" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:14:58.403577 4678 audit.go:45] 2017-01-25T05:14:58.40356635-05:00 AUDIT: id="b4f89023-7031-46c4-8bf6-79a736a0b7d4" response="200" I0125 05:14:58.403900 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (976.32µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:58.487218 4678 audit.go:125] 
2017-01-25T05:14:58.487169619-05:00 AUDIT: id="de4f947c-bf32-49bd-942a-827870da8e02" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:14:58.487635 4678 audit.go:45] 2017-01-25T05:14:58.487625324-05:00 AUDIT: id="de4f947c-bf32-49bd-942a-827870da8e02" response="200" I0125 05:14:58.487947 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (973.38µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:58.505631 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:14:58.505654 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:14:58.585990 4678 audit.go:125] 2017-01-25T05:14:58.58594685-05:00 AUDIT: id="9d11a946-79e2-4861-a0ba-bacff97dcd2a" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:14:58.589123 4678 audit.go:45] 2017-01-25T05:14:58.589105208-05:00 AUDIT: id="9d11a946-79e2-4861-a0ba-bacff97dcd2a" response="200" I0125 05:14:58.590093 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (4.442227ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:14:58.590622 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:14:58.640914 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:14:59.110605 4678 audit.go:125] 2017-01-25T05:14:59.110534137-05:00 AUDIT: id="b16cc23a-a9f7-41ff-a7d4-50179f01566b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:14:59.114212 4678 audit.go:45] 2017-01-25T05:14:59.114172271-05:00 AUDIT: id="b16cc23a-a9f7-41ff-a7d4-50179f01566b" response="200" I0125 05:14:59.114359 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (47.798045ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:14:59.260993 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:59.261029 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:59.261693 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:14:59.261709 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:14:59.262216 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc4394ea140 -1 [] true false map[] 0xc42bb400f0 } I0125 05:14:59.262287 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:59.262359 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc4394ea220 -1 [] true false map[] 0xc42bb403c0 } I0125 05:14:59.262379 4678 prober.go:113] Readiness probe for 
"router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:14:59.285930 4678 audit.go:125] 2017-01-25T05:14:59.28589838-05:00 AUDIT: id="20a4bcb6-ac72-4bb2-8979-25aca676903d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:14:59.286723 4678 audit.go:45] 2017-01-25T05:14:59.28671245-05:00 AUDIT: id="20a4bcb6-ac72-4bb2-8979-25aca676903d" response="200" I0125 05:14:59.286788 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.637629ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:59.287057 4678 controller.go:106] Found 0 cronjobs I0125 05:14:59.288751 4678 audit.go:125] 2017-01-25T05:14:59.288725126-05:00 AUDIT: id="07741473-1e51-4c1f-af60-cd0d814f157d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:14:59.289606 4678 audit.go:45] 2017-01-25T05:14:59.289595495-05:00 AUDIT: id="07741473-1e51-4c1f-af60-cd0d814f157d" response="200" I0125 05:14:59.289660 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.390923ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:14:59.289851 4678 controller.go:114] Found 0 jobs I0125 05:14:59.289859 4678 controller.go:117] Found 0 groups I0125 05:14:59.357770 4678 panics.go:76] GET /api/v1/watch/serviceaccounts?resourceVersion=10081&timeoutSeconds=456: (7m36.001085202s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:14:59.357998 4678 reflector.go:392] github.com/openshift/origin/pkg/serviceaccounts/controllers/create_dockercfg_secrets.go:220: Watch close - *api.ServiceAccount total 69 items received I0125 05:14:59.358564 4678 audit.go:125] 2017-01-25T05:14:59.358531456-05:00 AUDIT: id="7a4af957-cf49-463f-9b99-393fdd89038b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/serviceaccounts?resourceVersion=10932&timeoutSeconds=445" I0125 05:14:59.358962 4678 audit.go:45] 2017-01-25T05:14:59.358952424-05:00 AUDIT: id="7a4af957-cf49-463f-9b99-393fdd89038b" response="200" I0125 05:14:59.684629 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:00.068199 4678 audit.go:125] 2017-01-25T05:15:00.068140493-05:00 AUDIT: id="46fafae9-1fbd-42fb-bf27-75811cc7d81d" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:00.069523 4678 audit.go:45] 2017-01-25T05:15:00.06951077-05:00 AUDIT: id="46fafae9-1fbd-42fb-bf27-75811cc7d81d" response="200" I0125 05:15:00.069605 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.047197ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:01.068184 4678 audit.go:125] 2017-01-25T05:15:01.06814671-05:00 AUDIT: id="ba0d2379-a792-4221-9269-5092648e0527" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" 
namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:01.069455 4678 audit.go:45] 2017-01-25T05:15:01.069443979-05:00 AUDIT: id="ba0d2379-a792-4221-9269-5092648e0527" response="200" I0125 05:15:01.069536 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.988452ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:01.546077 4678 kubelet.go:1138] Container garbage collection succeeded I0125 05:15:01.684657 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:01.957992 4678 audit.go:125] 2017-01-25T05:15:01.957959718-05:00 AUDIT: id="e1aaa8be-8692-431f-a74f-90d04b5154c3" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:15:01.959093 4678 audit.go:45] 2017-01-25T05:15:01.959081855-05:00 AUDIT: id="e1aaa8be-8692-431f-a74f-90d04b5154c3" response="200" I0125 05:15:01.959169 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.399128ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:02.068120 4678 audit.go:125] 2017-01-25T05:15:02.06808134-05:00 AUDIT: id="d82d3dd9-f858-48d0-afec-58340c0ee1af" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:02.069426 4678 audit.go:45] 2017-01-25T05:15:02.069414744-05:00 AUDIT: id="d82d3dd9-f858-48d0-afec-58340c0ee1af" response="200" I0125 05:15:02.069496 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.949512ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:02.425733 4678 iptables.go:362] running iptables -N [KUBE-MARK-DROP -t nat] I0125 05:15:02.445324 4678 iptables.go:362] running iptables -C [KUBE-MARK-DROP -t nat -j MARK --set-xmark 0x00008000/0x00008000] I0125 05:15:02.464793 4678 iptables.go:362] running iptables -N [KUBE-FIREWALL -t filter] I0125 05:15:02.484006 4678 iptables.go:362] running iptables -C [KUBE-FIREWALL -t filter -m comment --comment kubernetes firewall for dropping marked packets -m mark --mark 0x00008000/0x00008000 -j DROP] I0125 05:15:02.493017 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:15:02.494419 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:02.498788 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true 
/var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:02.508132 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -j KUBE-FIREWALL] I0125 05:15:02.541385 4678 iptables.go:362] running iptables -C [INPUT -t filter -j KUBE-FIREWALL] I0125 05:15:02.560326 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:15:02.560371 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:02.569900 4678 iptables.go:362] running iptables -N [KUBE-MARK-MASQ -t nat] I0125 05:15:02.589724 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:15:02.608787 4678 iptables.go:362] running iptables -C [KUBE-MARK-MASQ -t nat -j MARK --set-xmark 0x00004000/0x00004000] I0125 05:15:02.627999 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:15:02.646559 4678 iptables.go:362] running iptables -C [KUBE-POSTROUTING -t nat -m comment --comment kubernetes service traffic requiring SNAT -m mark --mark 0x00004000/0x00004000 -j MASQUERADE] I0125 05:15:02.904520 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:15:02.904894 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:02.905458 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:02.957066 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:15:02.957095 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:02.967170 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:02.967206 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:02.967965 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:02.967984 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:02.968537 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:02 GMT] Content-Length:[0]] 0xc42d922e20 0 [] true false map[] 0xc42b0693b0 } I0125 05:15:02.968598 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:02.968931 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:02 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc4357b22c0 0 [] true false map[] 0xc43b0fd2c0 } I0125 05:15:02.968973 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:03.068463 4678 audit.go:125] 2017-01-25T05:15:03.068421658-05:00 AUDIT: id="68338a0e-d0d6-4013-82fb-dc7c881d863f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:03.069857 4678 audit.go:45] 2017-01-25T05:15:03.069843896-05:00 AUDIT: id="68338a0e-d0d6-4013-82fb-dc7c881d863f" response="200" I0125 05:15:03.069934 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.295143ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:03.405101 4678 audit.go:125] 2017-01-25T05:15:03.405064959-05:00 AUDIT: id="3532fb5c-1abb-4d9b-9912-b74a0beee3f4" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:03.405549 4678 audit.go:45] 2017-01-25T05:15:03.405539765-05:00 AUDIT: id="3532fb5c-1abb-4d9b-9912-b74a0beee3f4" response="200" I0125 05:15:03.405912 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.066683ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:03.406146 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:15:03.573286 4678 reflector.go:273] pkg/controller/endpoint/endpoints_controller.go:160: forcing resync I0125 05:15:03.573684 4678 endpoints_controller.go:334] Finished syncing service "default/kubernetes" endpoints. 
(2.617µs) I0125 05:15:03.579005 4678 audit.go:125] 2017-01-25T05:15:03.578962096-05:00 AUDIT: id="7e8e594a-2257-45f2-9fed-8520d5bbbf40" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:15:03.579121 4678 audit.go:125] 2017-01-25T05:15:03.579089534-05:00 AUDIT: id="da8d03d0-d2a6-4643-81d6-ff97736c3d6c" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:15:03.579436 4678 audit.go:125] 2017-01-25T05:15:03.579406662-05:00 AUDIT: id="c1891a1d-77dd-463e-9768-7dbcbcfddc50" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:15:03.579576 4678 audit.go:125] 2017-01-25T05:15:03.579545489-05:00 AUDIT: id="20c76c09-eeef-43ed-88ea-d6f16d656400" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/docker-registry" I0125 05:15:03.579731 4678 audit.go:125] 2017-01-25T05:15:03.579702399-05:00 AUDIT: id="f3b1d844-8cb4-49d6-bf1d-d318910cbe65" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/router" I0125 05:15:03.581073 4678 audit.go:45] 2017-01-25T05:15:03.581058389-05:00 AUDIT: id="c1891a1d-77dd-463e-9768-7dbcbcfddc50" response="200" I0125 05:15:03.581144 4678 audit.go:45] 2017-01-25T05:15:03.581133875-05:00 AUDIT: id="7e8e594a-2257-45f2-9fed-8520d5bbbf40" response="200" I0125 05:15:03.581154 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (6.265524ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:03.581193 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (5.8333ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:03.581295 4678 audit.go:45] 2017-01-25T05:15:03.581284912-05:00 AUDIT: id="20c76c09-eeef-43ed-88ea-d6f16d656400" response="200" I0125 05:15:03.581073 4678 audit.go:45] 2017-01-25T05:15:03.581058349-05:00 AUDIT: id="f3b1d844-8cb4-49d6-bf1d-d318910cbe65" response="200" I0125 05:15:03.581342 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/docker-registry: (6.85178ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:03.581368 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/router: (6.888469ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:03.581456 4678 
audit.go:45] 2017-01-25T05:15:03.581445622-05:00 AUDIT: id="da8d03d0-d2a6-4643-81d6-ff97736c3d6c" response="200" I0125 05:15:03.581498 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (6.237394ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:03.581763 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (8.311068ms) I0125 05:15:03.581791 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (7.89594ms) I0125 05:15:03.581822 4678 endpoints_controller.go:334] Finished syncing service "default/docker-registry" endpoints. (8.364227ms) I0125 05:15:03.581852 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:15:03.581900 4678 endpoints_controller.go:334] Finished syncing service "default/router" endpoints. (8.189443ms) I0125 05:15:03.583626 4678 audit.go:125] 2017-01-25T05:15:03.583599425-05:00 AUDIT: id="e1e18e1d-56b7-46ce-b7a5-31be183c3365" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:15:03.584915 4678 audit.go:45] 2017-01-25T05:15:03.584901806-05:00 AUDIT: id="e1e18e1d-56b7-46ce-b7a5-31be183c3365" response="200" I0125 05:15:03.584977 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (2.659898ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:03.585231 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(11.799982ms) I0125 05:15:03.585890 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:15:03.586039 4678 proxier.go:804] Syncing iptables rules I0125 05:15:03.586048 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:15:03.594868 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:15:03.594975 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:15:03.595043 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:15:03.595089 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:15:03.595103 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:15:03.595114 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:15:03.595122 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:15:03.595130 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master] I0125 05:15:03.595140 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:15:03.605120 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:15:03.624542 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:15:03.643342 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:15:03.662559 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:15:03.673673 4678 reflector.go:273] pkg/controller/podautoscaler/horizontal.go:133: forcing resync I0125 05:15:03.681507 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:15:03.691242 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:03.704433 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:15:03.723213 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:15:03.741962 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:15:03.757490 4678 reflector.go:273] pkg/controller/disruption/disruption.go:283: forcing resync I0125 05:15:03.757679 4678 reflector.go:273] pkg/controller/disruption/disruption.go:284: forcing resync I0125 05:15:03.757701 4678 reflector.go:273] pkg/controller/disruption/disruption.go:281: forcing resync I0125 05:15:03.762215 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] 
:KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A 
KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:15:03.762258 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:15:03.783383 4678 proxier.go:797] syncProxyRules took 197.335853ms I0125 05:15:03.783418 4678 proxier.go:566] OnEndpointsUpdate took 197.469004ms for 6 endpoints I0125 05:15:03.783459 4678 proxier.go:381] Received update notice: [] I0125 05:15:03.783493 4678 proxier.go:804] Syncing iptables rules I0125 05:15:03.783502 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:15:03.802198 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:15:03.815271 4678 reflector.go:273] pkg/controller/disruption/disruption.go:285: forcing resync I0125 05:15:03.827729 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:15:03.858292 4678 reflector.go:273] pkg/controller/petset/pet_set.go:148: forcing resync I0125 05:15:03.860418 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:15:03.880142 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j 
KUBE-SERVICES] I0125 05:15:03.899839 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:15:03.918675 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:15:03.939871 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:15:03.959011 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:15:03.979552 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT 
--to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:15:03.979587 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:15:03.989449 4678 reflector.go:273] pkg/controller/informers/factory.go:89: forcing resync I0125 05:15:04.000508 4678 proxier.go:797] syncProxyRules took 217.004005ms I0125 05:15:04.000536 4678 proxier.go:431] OnServiceUpdate took 217.06608ms for 4 services I0125 05:15:04.068555 4678 audit.go:125] 
2017-01-25T05:15:04.068501966-05:00 AUDIT: id="abd641e8-6ec3-4eb0-ac0b-b03d162daf46" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:04.069970 4678 audit.go:45] 2017-01-25T05:15:04.069957639-05:00 AUDIT: id="abd641e8-6ec3-4eb0-ac0b-b03d162daf46" response="200" I0125 05:15:04.070081 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.439695ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:04.134105 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:15:04.134176 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:15:04.134212 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:04.134236 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:04.134253 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:15:04.134261 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:04.134273 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:04.134297 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:15:04.134383 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:15:04.134407 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:04.134416 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:15:04.134423 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:15:04.134411 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", 
boundByController: true I0125 05:15:04.134430 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:15:04.134433 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:15:04.134455 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:04.134472 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:15:04.134477 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:15:04.134484 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:15:04.134479 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:15:04.134494 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:15:04.134495 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:15:04.134531 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:15:04.134537 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:04.134549 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:04.134579 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:05.068381 4678 audit.go:125] 2017-01-25T05:15:05.068333924-05:00 AUDIT: id="07dd706d-57a8-42af-8aac-919b7a1d5ccb" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:05.069813 4678 audit.go:45] 2017-01-25T05:15:05.069802446-05:00 AUDIT: id="07dd706d-57a8-42af-8aac-919b7a1d5ccb" response="200" I0125 05:15:05.069892 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.281155ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:05.327851 4678 audit.go:125] 2017-01-25T05:15:05.327819055-05:00 AUDIT: id="d656ee66-5a6b-48a6-84c2-8a43fa42fd90" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:15:05.328941 4678 audit.go:45] 2017-01-25T05:15:05.328926383-05:00 AUDIT: id="d656ee66-5a6b-48a6-84c2-8a43fa42fd90" response="200" I0125 05:15:05.329005 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.397613ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:05.610118 4678 panics.go:76] GET /api/v1/watch/secrets?resourceVersion=10082&timeoutSeconds=531: (8m51.001145034s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:05.610440 4678 reflector.go:392] pkg/controller/resourcequota/resource_quota_controller.go:232: Watch close - *api.Secret total 127 items received I0125 05:15:05.611111 4678 audit.go:125] 2017-01-25T05:15:05.61108313-05:00 AUDIT: id="1340d821-e3df-4030-b476-5b84cd620f4b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/secrets?resourceVersion=10974&timeoutSeconds=346" I0125 05:15:05.611599 4678 audit.go:45] 2017-01-25T05:15:05.611589862-05:00 AUDIT: id="1340d821-e3df-4030-b476-5b84cd620f4b" response="200" I0125 05:15:05.684633 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:06.074593 4678 audit.go:125] 2017-01-25T05:15:06.07452073-05:00 AUDIT: id="cf9fdf87-6192-4bcc-84c1-6df85c75b1df" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:06.076761 4678 audit.go:45] 2017-01-25T05:15:06.076740981-05:00 AUDIT: id="cf9fdf87-6192-4bcc-84c1-6df85c75b1df" response="200" I0125 05:15:06.076879 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (5.363927ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:06.684620 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094) I0125 05:15:06.684679 4678 kubelet_pods.go:1029] Generating status for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:06.684826 4678 status_manager.go:312] Ignoring same status for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:09 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:29 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:09 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.18.7.222 StartTime:2017-01-25 03:41:09 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:router 
State:{Waiting: Running:0xc434d64700 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-haproxy-router:86a9783 ImageID:docker://sha256:0e944dc1f6ca904b8892fd8e5da5ec5cf13c0f673b44380cc81c1fdbc53b379e ContainerID:docker://38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018}]} I0125 05:15:06.684948 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:06.778847 4678 secret.go:179] Setting up volume server-certificate for pod 04c98b55-e2da-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/server-certificate I0125 05:15:06.778921 4678 secret.go:179] Setting up volume router-token-s79l8 for pod 04c98b55-e2da-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/router-token-s79l8 I0125 05:15:06.779667 4678 audit.go:125] 2017-01-25T05:15:06.779639225-05:00 AUDIT: id="e47eddb5-eb28-492a-9013-f3bcbbdc0192" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-certs" I0125 05:15:06.780336 4678 audit.go:125] 2017-01-25T05:15:06.780307677-05:00 AUDIT: id="23c75f1b-9673-4c97-94df-64771f94d9dc" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-token-s79l8" I0125 05:15:06.781322 4678 audit.go:45] 2017-01-25T05:15:06.781307191-05:00 AUDIT: id="e47eddb5-eb28-492a-9013-f3bcbbdc0192" response="200" I0125 05:15:06.781397 4678 audit.go:45] 2017-01-25T05:15:06.781384823-05:00 AUDIT: id="23c75f1b-9673-4c97-94df-64771f94d9dc" response="200" I0125 05:15:06.781638 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-certs: (2.196213ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:06.781674 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-token-s79l8: (1.552816ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:06.781906 4678 secret.go:206] Received secret default/router-token-s79l8 containing (4) pieces of data, 4105 total bytes I0125 05:15:06.782055 4678 secret.go:206] Received secret default/router-certs containing (2) pieces of data, 6633 total bytes I0125 05:15:06.782575 4678 atomic_writer.go:142] pod default/router-2-tnqzg volume server-certificate: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/server-certificate I0125 05:15:06.782594 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094-server-certificate" (spec.Name: "server-certificate") pod "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094" (UID: "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094"). 
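The secret-volume entries just above (atomic_writer.go "no update required for target directory" followed by MountVolume.SetUp succeeding) record the kubelet skipping a rewrite because the projected secret content on disk already matches, and otherwise replacing it atomically. As a rough illustration only, and not the kubelet's actual AtomicWriter code, the following minimal Go sketch shows the same compare-then-atomic-replace pattern with hypothetical names (writeIfChanged, a single-file payload); it is an assumption-laden simplification of what those log lines describe.

    // Simplified sketch (not the kubelet implementation): write a secret payload
    // into a target directory only when its content changed, using a temp file
    // plus rename so readers never observe a partially written file.
    package main

    import (
        "bytes"
        "fmt"
        "os"
        "path/filepath"
    )

    // writeIfChanged reports whether the file was (re)written.
    func writeIfChanged(targetDir, name string, data []byte) (bool, error) {
        target := filepath.Join(targetDir, name)

        // Skip the write when existing content already matches; this is the
        // case the "no update required for target directory" lines record.
        if existing, err := os.ReadFile(target); err == nil && bytes.Equal(existing, data) {
            return false, nil
        }

        tmp, err := os.CreateTemp(targetDir, name+".tmp-")
        if err != nil {
            return false, err
        }
        defer os.Remove(tmp.Name()) // harmless no-op once the rename succeeds

        if _, err := tmp.Write(data); err != nil {
            tmp.Close()
            return false, err
        }
        if err := tmp.Close(); err != nil {
            return false, err
        }
        // Rename is atomic within one filesystem, so consumers see either the
        // old or the new content, never a torn write.
        return true, os.Rename(tmp.Name(), target)
    }

    func main() {
        changed, err := writeIfChanged(os.TempDir(), "token", []byte("example-secret-data"))
        if err != nil {
            fmt.Println("write failed:", err)
            return
        }
        fmt.Println("updated:", changed)
    }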
I0125 05:15:06.782603 4678 atomic_writer.go:142] pod default/router-2-tnqzg volume router-token-s79l8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/router-token-s79l8 I0125 05:15:06.782619 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094-router-token-s79l8" (spec.Name: "router-token-s79l8") pod "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094" (UID: "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094"). I0125 05:15:06.985204 4678 volume_manager.go:365] All volumes are attached and mounted for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:06.985857 4678 audit.go:125] 2017-01-25T05:15:06.985827029-05:00 AUDIT: id="57bceccb-7179-4f74-aa22-c2e9b99da099" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-dockercfg-g5x9s" I0125 05:15:06.987045 4678 audit.go:45] 2017-01-25T05:15:06.987034913-05:00 AUDIT: id="57bceccb-7179-4f74-aa22-c2e9b99da099" response="200" I0125 05:15:06.987265 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-dockercfg-g5x9s: (1.663805ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:06.987397 4678 docker_manager.go:1938] Found pod infra container for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:06.990983 4678 docker_manager.go:1951] Pod infra container looks good, keep it "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:06.990999 4678 docker_manager.go:1999] pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" container "router" exists as 38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018 I0125 05:15:06.991151 4678 docker_manager.go:2086] Got container changes for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:188d4e0914dd28bdf7d2fcdd075d10d5db9c14ede72c6c0417fe6eaf68450985 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[188d4e0914dd28bdf7d2fcdd075d10d5db9c14ede72c6c0417fe6eaf68450985:-1 38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018:0]} I0125 05:15:07.068212 4678 audit.go:125] 2017-01-25T05:15:07.068173101-05:00 AUDIT: id="c028f0c0-31e9-421c-bd72-9d64665f53e2" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:07.069555 4678 audit.go:45] 2017-01-25T05:15:07.069542769-05:00 AUDIT: id="c028f0c0-31e9-421c-bd72-9d64665f53e2" response="200" I0125 05:15:07.069640 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.066462ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:07.072645 4678 proxier.go:804] Syncing iptables rules I0125 05:15:07.072661 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:15:07.091894 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:15:07.110746 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes 
service portals -j KUBE-SERVICES] I0125 05:15:07.129616 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:15:07.148421 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:15:07.167176 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:15:07.185680 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:15:07.204445 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:15:07.223462 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:15:07.243373 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment 
default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:15:07.243411 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:15:07.264139 
4678 proxier.go:797] syncProxyRules took 191.486142ms I0125 05:15:07.264175 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-CONTAINER -t nat] I0125 05:15:07.283034 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-CONTAINER] I0125 05:15:07.302268 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-HOST -t nat] I0125 05:15:07.322246 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-HOST] I0125 05:15:07.340673 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-CONTAINER -t nat] I0125 05:15:07.359184 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-CONTAINER] I0125 05:15:07.377697 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-HOST -t nat] I0125 05:15:07.396676 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-HOST] I0125 05:15:07.415395 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-NON-LOCAL -t filter] I0125 05:15:07.433941 4678 iptables.go:362] running iptables -C [INPUT -t filter -m comment --comment Ensure that non-local NodePort traffic can flow -j KUBE-NODEPORT-NON-LOCAL] I0125 05:15:07.684680 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:08.068479 4678 audit.go:125] 2017-01-25T05:15:08.068437081-05:00 AUDIT: id="e35d60ba-2eff-4f81-a6a8-6bee2c984422" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:08.069903 4678 audit.go:45] 2017-01-25T05:15:08.069889194-05:00 AUDIT: id="e35d60ba-2eff-4f81-a6a8-6bee2c984422" response="200" I0125 05:15:08.070002 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.354232ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:08.407112 4678 audit.go:125] 2017-01-25T05:15:08.407077699-05:00 AUDIT: id="f235d59d-4d7d-443a-96bb-1546b4d750bb" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:08.407545 4678 audit.go:45] 2017-01-25T05:15:08.407536517-05:00 AUDIT: id="f235d59d-4d7d-443a-96bb-1546b4d750bb" response="200" I0125 05:15:08.407880 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.01163ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:08.591322 4678 audit.go:125] 2017-01-25T05:15:08.591288373-05:00 AUDIT: id="af5fa156-21db-4e97-9770-e573541fa381" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:15:08.591721 4678 audit.go:45] 2017-01-25T05:15:08.591711267-05:00 AUDIT: id="af5fa156-21db-4e97-9770-e573541fa381" response="200" I0125 05:15:08.592008 4678 panics.go:76] GET 
/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (931.532µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:08.649096 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:15:08.649121 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:15:08.671380 4678 audit.go:125] 2017-01-25T05:15:08.671335219-05:00 AUDIT: id="34b8d748-ec31-4652-8dd6-17d76bad7d9a" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:15:08.674575 4678 audit.go:45] 2017-01-25T05:15:08.67455851-05:00 AUDIT: id="34b8d748-ec31-4652-8dd6-17d76bad7d9a" response="200" I0125 05:15:08.675519 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (4.488145ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:08.676109 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:15:08.756422 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:15:09.068278 4678 audit.go:125] 2017-01-25T05:15:09.068228472-05:00 AUDIT: id="b1b8799c-f841-40e9-aa3a-a3f543dfc109" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:09.069637 4678 audit.go:45] 2017-01-25T05:15:09.06962628-05:00 AUDIT: id="b1b8799c-f841-40e9-aa3a-a3f543dfc109" response="200" I0125 05:15:09.069705 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.113856ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:09.261000 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:09.261031 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:09.261694 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:09.261706 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:09.262286 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Connection:[close] Content-Type:[text/html] Cache-Control:[no-cache]] 0xc42e288c00 -1 [] true false map[] 0xc4360e70e0 } I0125 05:15:09.262323 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:09.262389 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42e288ce0 -1 [] true false map[] 0xc435d614a0 } I0125 05:15:09.262408 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:09.292475 4678 audit.go:125] 2017-01-25T05:15:09.292434019-05:00 AUDIT: id="be16bf84-63c7-4e9d-b63d-7b14aab554c7" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:15:09.293271 4678 audit.go:45] 2017-01-25T05:15:09.293260811-05:00 AUDIT: 
id="be16bf84-63c7-4e9d-b63d-7b14aab554c7" response="200" I0125 05:15:09.293343 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.846469ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:09.293597 4678 controller.go:106] Found 0 cronjobs I0125 05:15:09.295326 4678 audit.go:125] 2017-01-25T05:15:09.295296109-05:00 AUDIT: id="05618586-58e7-4065-99ea-4eb487d8d858" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:15:09.296177 4678 audit.go:45] 2017-01-25T05:15:09.29616697-05:00 AUDIT: id="05618586-58e7-4065-99ea-4eb487d8d858" response="200" I0125 05:15:09.296244 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.42974ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:09.296470 4678 controller.go:114] Found 0 jobs I0125 05:15:09.296478 4678 controller.go:117] Found 0 groups I0125 05:15:09.684635 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:10.068187 4678 audit.go:125] 2017-01-25T05:15:10.068145292-05:00 AUDIT: id="aace49ca-7dac-4561-aafc-67e4ca443e8c" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:10.069425 4678 audit.go:45] 2017-01-25T05:15:10.069414293-05:00 AUDIT: id="aace49ca-7dac-4561-aafc-67e4ca443e8c" response="200" I0125 05:15:10.069504 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.932881ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:10.442868 4678 panics.go:76] GET /api/v1/watch/persistentvolumeclaims?resourceVersion=8550&timeoutSeconds=573: (9m33.001296769s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:10.443123 4678 reflector.go:392] pkg/controller/informers/factory.go:89: Watch close - *api.PersistentVolumeClaim total 3 items received I0125 05:15:10.443759 4678 audit.go:125] 2017-01-25T05:15:10.443728998-05:00 AUDIT: id="ad2a1737-c627-4169-ac9c-97ad8088209d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/persistentvolumeclaims?resourceVersion=10942&timeoutSeconds=545" I0125 05:15:10.444224 4678 audit.go:45] 2017-01-25T05:15:10.444213373-05:00 AUDIT: id="ad2a1737-c627-4169-ac9c-97ad8088209d" response="200" I0125 05:15:11.068151 4678 audit.go:125] 2017-01-25T05:15:11.068111675-05:00 AUDIT: id="cc1c4822-ce2f-4c3d-aad6-7f560a6ae681" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:11.069453 4678 audit.go:45] 2017-01-25T05:15:11.069441804-05:00 AUDIT: id="cc1c4822-ce2f-4c3d-aad6-7f560a6ae681" response="200" I0125 05:15:11.069533 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.952414ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:11.684620 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:11.960300 4678 audit.go:125] 2017-01-25T05:15:11.960259936-05:00 AUDIT: id="afc25331-ae15-4671-bed0-718459dea236" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:15:11.961413 4678 audit.go:45] 2017-01-25T05:15:11.961401597-05:00 AUDIT: id="afc25331-ae15-4671-bed0-718459dea236" response="200" I0125 05:15:11.961490 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.45218ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:12.068312 4678 audit.go:125] 2017-01-25T05:15:12.068263279-05:00 AUDIT: id="f998d7a9-aefb-43bc-9b49-bfb8f23822d7" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:12.069634 4678 audit.go:45] 2017-01-25T05:15:12.069621647-05:00 AUDIT: id="f998d7a9-aefb-43bc-9b49-bfb8f23822d7" response="200" I0125 05:15:12.069718 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.132902ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:12.485848 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:15:12.486286 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:12.498931 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:12.548875 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:15:12.548898 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:12.904460 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:15:12.904845 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:12.905422 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:12.957838 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:15:12.957861 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:12.967170 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:12.967195 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:12.967950 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:12.967966 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:12.968732 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:12 GMT] Content-Length:[0]] 0xc432bbe020 0 [] true false map[] 0xc43917dc20 } I0125 05:15:12.968782 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:12.968889 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:12 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc432bbe140 0 [] true false map[] 0xc432be8b40 } I0125 05:15:12.968920 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:13.068126 4678 audit.go:125] 2017-01-25T05:15:13.068087957-05:00 AUDIT: id="7ec8fee1-7f3c-449c-a8fd-d7ab1a0f86d8" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:13.069423 4678 audit.go:45] 2017-01-25T05:15:13.06941032-05:00 AUDIT: id="7ec8fee1-7f3c-449c-a8fd-d7ab1a0f86d8" response="200" I0125 05:15:13.069499 4678 panics.go:76] 
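The prober entries just above show the three probe styles the kubelet is running in this window: a plain TCP dial of 172.17.0.2:5432 for the postgresql-master liveness probe, an exec probe that runs psql ... -c 'SELECT 1' inside the postgresql containers (the "?column? / 1 / (1 row)" block is that command's output, and the ioctl warnings come from running an interactive shell with no TTY), and HTTP GETs against /healthz for the router and registry. Below is a minimal standalone sketch of the exec-style check only, assuming psql is on PATH and POSTGRESQL_USER/POSTGRESQL_DATABASE are set as they are in the probed containers; it is an illustration, not the kubelet prober, and the 1-second timeout is borrowed from the TCP probes logged here:

// readiness-check.go: illustrative stand-in for the exec probe seen above.
package main

import (
	"context"
	"fmt"
	"os"
	"os/exec"
	"time"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
	defer cancel()

	cmd := exec.CommandContext(ctx, "psql",
		"-h", "127.0.0.1",
		"-U", os.Getenv("POSTGRESQL_USER"),
		"-q", "-d", os.Getenv("POSTGRESQL_DATABASE"),
		"-c", "SELECT 1")
	out, err := cmd.CombinedOutput()
	if err != nil {
		fmt.Printf("not ready: %v\n%s", err, out)
		os.Exit(1) // a non-zero exit is what marks an exec probe as failed
	}
	fmt.Printf("ready:\n%s", out) // the "?column? / 1 / (1 row)" text in the log is this output
}

A zero exit status is what the prober reports as success; any other exit, or a timeout, fails the probe.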
GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.8957ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:13.408932 4678 audit.go:125] 2017-01-25T05:15:13.408897891-05:00 AUDIT: id="172c81eb-cfb3-4849-bddc-964abe97d7ed" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:13.409355 4678 audit.go:45] 2017-01-25T05:15:13.409344555-05:00 AUDIT: id="172c81eb-cfb3-4849-bddc-964abe97d7ed" response="200" I0125 05:15:13.409678 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (980.237µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:13.409884 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:15:13.684626 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:14.009567 4678 gc_controller.go:175] GC'ing orphaned I0125 05:15:14.009588 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. I0125 05:15:14.068137 4678 audit.go:125] 2017-01-25T05:15:14.068099299-05:00 AUDIT: id="12f82268-3499-4a97-8eff-0ac5424cbfec" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:14.069398 4678 audit.go:45] 2017-01-25T05:15:14.069387778-05:00 AUDIT: id="12f82268-3499-4a97-8eff-0ac5424cbfec" response="200" I0125 05:15:14.069478 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.956707ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:14.679206 4678 panics.go:76] GET /api/v1/watch/services?resourceVersion=10370&timeoutSeconds=369: (6m9.002557505s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:service-ingress-ip-controller] 172.18.7.222:50846] I0125 05:15:14.679462 4678 reflector.go:392] github.com/openshift/origin/pkg/service/controller/ingressip/controller.go:166: Watch close - *api.Service total 9 items received I0125 05:15:14.681649 4678 audit.go:125] 2017-01-25T05:15:14.681609337-05:00 AUDIT: id="04f859c0-5e7a-4080-b44f-10158f32bf00" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:service-ingress-ip-controller" as="" asgroups="" namespace="" uri="/api/v1/watch/services?resourceVersion=10976&timeoutSeconds=406" I0125 05:15:14.682034 4678 audit.go:45] 2017-01-25T05:15:14.682024898-05:00 AUDIT: id="04f859c0-5e7a-4080-b44f-10158f32bf00" response="200" I0125 05:15:15.068350 4678 audit.go:125] 2017-01-25T05:15:15.068294251-05:00 AUDIT: id="8e02445e-106d-4c9a-9a4e-5e4b34f45dc4" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:15.069697 4678 audit.go:45] 2017-01-25T05:15:15.069678553-05:00 AUDIT: id="8e02445e-106d-4c9a-9a4e-5e4b34f45dc4" response="200" I0125 05:15:15.069780 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.201899ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:15.344665 4678 audit.go:125] 2017-01-25T05:15:15.344631459-05:00 AUDIT: id="506abb62-f5c2-4ef1-98c1-f20d255487ae" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:15:15.345636 4678 audit.go:45] 2017-01-25T05:15:15.345623671-05:00 AUDIT: id="506abb62-f5c2-4ef1-98c1-f20d255487ae" response="200" I0125 05:15:15.345706 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.273829ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:15.684619 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:16.068331 4678 audit.go:125] 2017-01-25T05:15:16.068277135-05:00 AUDIT: id="01cb37a4-df9f-415d-b2e9-874aea01f02b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:16.069649 4678 audit.go:45] 2017-01-25T05:15:16.069638004-05:00 AUDIT: id="01cb37a4-df9f-415d-b2e9-874aea01f02b" response="200" I0125 05:15:16.069736 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.138927ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:17.068192 4678 audit.go:125] 2017-01-25T05:15:17.068154005-05:00 AUDIT: id="641125cc-b9d6-4133-b1da-24045125d845" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:17.069473 4678 audit.go:45] 2017-01-25T05:15:17.069461234-05:00 AUDIT: id="641125cc-b9d6-4133-b1da-24045125d845" response="200" I0125 05:15:17.069554 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.956501ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:17.684606 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:18.068178 4678 audit.go:125] 2017-01-25T05:15:18.068135714-05:00 AUDIT: id="09ecf672-6616-422a-9be6-74ec54ea79e1" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:18.069479 4678 audit.go:45] 2017-01-25T05:15:18.069466094-05:00 AUDIT: id="09ecf672-6616-422a-9be6-74ec54ea79e1" response="200" I0125 05:15:18.069550 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.956462ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:18.410961 4678 audit.go:125] 
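The once-per-second GET .../pods?labelSelector=deployment%3Dpostgresql-slave-1 requests are the extended test polling for the slave deployment's pods; each attempt produces an audit.go:125 request record, an audit.go:45 response record, and a panics.go:76 access-log line with the latency. A rough sketch of an equivalent poll using only the standard library follows; the API address, bearer token, and iteration count are placeholders rather than values taken from this run, and the test itself goes through the client libraries rather than raw HTTP:

// poll-pods.go: illustrative poll mirroring the repeated label-selector GETs above.
package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"time"
)

func main() {
	const (
		apiServer = "https://127.0.0.1:8443"                                 // placeholder; the master here serves on 8443
		namespace = "extended-test-postgresql-replication-1-34bbd-xd4g8"
		selector  = "deployment=postgresql-slave-1"
		token     = "REPLACE_WITH_BEARER_TOKEN"                              // placeholder
	)

	client := &http.Client{
		Timeout:   5 * time.Second,
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}}, // demo only
	}
	u := fmt.Sprintf("%s/api/v1/namespaces/%s/pods?labelSelector=%s",
		apiServer, namespace, url.QueryEscape(selector))

	for i := 0; i < 5; i++ { // the test polls roughly once per second
		req, err := http.NewRequest("GET", u, nil)
		if err != nil {
			panic(err)
		}
		req.Header.Set("Authorization", "Bearer "+token)

		resp, err := client.Do(req)
		if err != nil {
			fmt.Println("request failed:", err)
		} else {
			body, _ := io.ReadAll(resp.Body)
			resp.Body.Close()
			fmt.Printf("HTTP %d, %d bytes\n", resp.StatusCode, len(body))
		}
		time.Sleep(1 * time.Second)
	}
}

Inspecting the returned PodList for a running, ready pod before giving up is what turns a loop like this into the test's wait condition.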
2017-01-25T05:15:18.410921052-05:00 AUDIT: id="b0f6402d-3920-4780-a098-2e6b54f0d3c5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:18.411419 4678 audit.go:45] 2017-01-25T05:15:18.411409039-05:00 AUDIT: id="b0f6402d-3920-4780-a098-2e6b54f0d3c5" response="200" I0125 05:15:18.411763 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.067305ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:18.677663 4678 audit.go:125] 2017-01-25T05:15:18.677629059-05:00 AUDIT: id="de092e02-f472-45ba-a1a2-0be679befe26" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:15:18.678062 4678 audit.go:45] 2017-01-25T05:15:18.678052102-05:00 AUDIT: id="de092e02-f472-45ba-a1a2-0be679befe26" response="200" I0125 05:15:18.678405 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (982.511µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:18.745573 4678 audit.go:125] 2017-01-25T05:15:18.745535845-05:00 AUDIT: id="93471c3f-9344-4c11-96ef-6418c3cfb42a" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:15:18.747789 4678 audit.go:45] 2017-01-25T05:15:18.747775246-05:00 AUDIT: id="93471c3f-9344-4c11-96ef-6418c3cfb42a" response="200" I0125 05:15:18.748102 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.778227ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:18.749017 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:15:18.769111 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:15:18.769129 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:15:18.858375 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:15:19.068392 4678 audit.go:125] 2017-01-25T05:15:19.068350901-05:00 AUDIT: id="1e905790-107c-40f0-ad1a-1596ff120179" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:19.069724 4678 audit.go:45] 2017-01-25T05:15:19.069712575-05:00 AUDIT: id="1e905790-107c-40f0-ad1a-1596ff120179" response="200" I0125 05:15:19.069800 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.182006ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:19.134422 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:15:19.134444 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:15:19.134496 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:15:19.134506 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:15:19.134522 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:19.134537 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:19.134547 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:15:19.134539 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:19.134561 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:15:19.134562 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:19.134568 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim 
"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:19.134573 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:15:19.134580 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:15:19.134605 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:15:19.134579 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:19.134704 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:19.134711 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:15:19.134715 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:15:19.134719 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:15:19.134749 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:15:19.134760 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:15:19.134802 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:15:19.134809 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:19.134821 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:19.134829 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:19.134862 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:15:19.261029 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:19.261056 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:19.261928 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:19.261946 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:19.262168 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] 
Cache-Control:[no-cache] Connection:[close]] 0xc435261e20 -1 [] true false map[] 0xc42779b680 } I0125 05:15:19.262237 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:19.262512 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc435261f00 -1 [] true false map[] 0xc43310d860 } I0125 05:15:19.262535 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:19.299034 4678 audit.go:125] 2017-01-25T05:15:19.298994419-05:00 AUDIT: id="cf13a7fe-f4ba-4a88-a47b-52eaa0db0d0a" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:15:19.299813 4678 audit.go:45] 2017-01-25T05:15:19.299802704-05:00 AUDIT: id="cf13a7fe-f4ba-4a88-a47b-52eaa0db0d0a" response="200" I0125 05:15:19.299881 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.699136ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:19.300103 4678 controller.go:106] Found 0 cronjobs I0125 05:15:19.301651 4678 audit.go:125] 2017-01-25T05:15:19.301632224-05:00 AUDIT: id="677d5219-67e1-4def-a47b-a76ce8872a11" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:15:19.302372 4678 audit.go:45] 2017-01-25T05:15:19.302361582-05:00 AUDIT: id="677d5219-67e1-4def-a47b-a76ce8872a11" response="200" I0125 05:15:19.302424 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.109446ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:19.302643 4678 controller.go:114] Found 0 jobs I0125 05:15:19.302654 4678 controller.go:117] Found 0 groups I0125 05:15:19.684633 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:20.068165 4678 audit.go:125] 2017-01-25T05:15:20.068126542-05:00 AUDIT: id="bd819949-63e4-4550-ad4a-9ea1b1bc5b40" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:20.069491 4678 audit.go:45] 2017-01-25T05:15:20.069478199-05:00 AUDIT: id="bd819949-63e4-4550-ad4a-9ea1b1bc5b40" response="200" I0125 05:15:20.069563 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.003127ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:21.068196 4678 audit.go:125] 2017-01-25T05:15:21.068147254-05:00 AUDIT: id="fe176e08-6446-46fe-99f9-1a6f43d1ba02" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:21.069561 4678 audit.go:45] 2017-01-25T05:15:21.069549938-05:00 AUDIT: 
id="fe176e08-6446-46fe-99f9-1a6f43d1ba02" response="200" I0125 05:15:21.069641 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.074399ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:21.684605 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:21.962575 4678 audit.go:125] 2017-01-25T05:15:21.962541259-05:00 AUDIT: id="0d94eb8e-55b1-4530-8755-dff8f098a772" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:15:21.963649 4678 audit.go:45] 2017-01-25T05:15:21.963638572-05:00 AUDIT: id="0d94eb8e-55b1-4530-8755-dff8f098a772" response="200" I0125 05:15:21.963719 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.388405ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:22.068159 4678 audit.go:125] 2017-01-25T05:15:22.068120569-05:00 AUDIT: id="c46e5353-9b10-4a9d-a945-e8051803e3cd" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:22.069652 4678 audit.go:45] 2017-01-25T05:15:22.069641129-05:00 AUDIT: id="c46e5353-9b10-4a9d-a945-e8051803e3cd" response="200" I0125 05:15:22.069732 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.192972ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:22.485810 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:15:22.486206 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:22.498867 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:22.546666 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:15:22.546705 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:22.904431 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:15:22.904772 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:22.905431 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:22.956189 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:15:22.956213 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:22.967135 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:22.967151 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:22.967931 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:22 GMT] Content-Length:[0]] 0xc435cf90e0 0 [] true false map[] 0xc42a7ab680 } I0125 05:15:22.967968 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:22.968038 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:22.968053 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:22.968811 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:22 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc435cf9380 0 [] true false map[] 0xc435068d20 } I0125 05:15:22.968853 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:23.068267 4678 audit.go:125] 2017-01-25T05:15:23.068214521-05:00 AUDIT: id="0edeedd5-013b-4a51-b3c4-0192913a553a" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:23.069710 4678 audit.go:45] 2017-01-25T05:15:23.069698342-05:00 AUDIT: id="0edeedd5-013b-4a51-b3c4-0192913a553a" response="200" I0125 05:15:23.069794 4678 panics.go:76] 
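The HTTP-Probe entries above (the router's /healthz on :1936 and the registry's /healthz on the pod IP at :5000) succeed when the endpoint answers within the timeout with a status the kubelet accepts, i.e. anything in the 200-399 range. A standalone illustration of the same check, with the URLs taken from the log and the 1-second timeout borrowed from the TCP probes; this is not the kubelet's prober code:

// healthz-check.go: illustrative HTTP GET probe matching the entries above.
package main

import (
	"fmt"
	"net/http"
	"os"
	"time"
)

func probe(url string) error {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	// Mirror the kubelet's success rule: any status in [200, 400) passes.
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	for _, url := range []string{
		"http://localhost:1936/healthz",  // router stats port, as probed above
		"http://172.17.0.3:5000/healthz", // registry pod IP from the log
	} {
		if err := probe(url); err != nil {
			fmt.Printf("%s: FAIL (%v)\n", url, err)
			os.Exit(1)
		}
		fmt.Printf("%s: ok\n", url)
	}
}

The pod-IP URL is only reachable from the node itself or from inside the cluster network, which is why the kubelet, running on the node, can probe it directly.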
GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.222449ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:23.412908 4678 audit.go:125] 2017-01-25T05:15:23.412877153-05:00 AUDIT: id="acd47069-4a27-42a7-a53f-ff9ce6dfbbaf" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:23.413333 4678 audit.go:45] 2017-01-25T05:15:23.413324349-05:00 AUDIT: id="acd47069-4a27-42a7-a53f-ff9ce6dfbbaf" response="200" I0125 05:15:23.413669 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.01628ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:23.413911 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:15:23.605092 4678 panics.go:76] GET /api/v1/watch/configmaps?resourceVersion=4&timeoutSeconds=352: (5m52.000852338s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:23.605364 4678 reflector.go:392] github.com/openshift/origin/pkg/quota/controller/clusterquotareconciliation/reconciliation_controller.go:120: Watch close - *api.ConfigMap total 0 items received I0125 05:15:23.605932 4678 audit.go:125] 2017-01-25T05:15:23.605903579-05:00 AUDIT: id="d93f359b-a13b-46b3-8db7-81cbff9cec98" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/configmaps?resourceVersion=4&timeoutSeconds=364" I0125 05:15:23.606388 4678 audit.go:45] 2017-01-25T05:15:23.606374271-05:00 AUDIT: id="d93f359b-a13b-46b3-8db7-81cbff9cec98" response="200" I0125 05:15:23.684615 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:24.068082 4678 audit.go:125] 2017-01-25T05:15:24.068043656-05:00 AUDIT: id="e342c5cb-c9cb-42f5-903c-cd38bda2d8ea" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:24.069418 4678 audit.go:45] 2017-01-25T05:15:24.069405963-05:00 AUDIT: id="e342c5cb-c9cb-42f5-903c-cd38bda2d8ea" response="200" I0125 05:15:24.069494 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.985199ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:25.068000 4678 audit.go:125] 2017-01-25T05:15:25.067957614-05:00 AUDIT: id="55bff4a6-d683-46da-9853-dd7edb9dbde1" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:25.069326 4678 audit.go:45] 2017-01-25T05:15:25.069308401-05:00 AUDIT: id="55bff4a6-d683-46da-9853-dd7edb9dbde1" response="200" I0125 05:15:25.069395 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.892616ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:25.350413 4678 audit.go:125] 
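The watch lines above show the reflector pattern used throughout these components: a watch such as GET /api/v1/watch/configmaps?resourceVersion=4&timeoutSeconds=352 is held open until the server-side timeout expires (here after 5m52s with a 200), the reflector logs "Watch close", and a new watch is opened immediately from the last resourceVersion observed. A rough standard-library sketch of that loop against the legacy watch endpoint follows; the server address, token, starting resourceVersion, and 60-second timeout are placeholders, and real clients use client-go's reflector rather than raw HTTP:

// watch-sketch.go: illustrative raw watch loop mirroring the logged pattern.
package main

import (
	"crypto/tls"
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// watchEvent matches the stream the watch endpoint returns:
// a sequence of {"type":"ADDED|MODIFIED|DELETED|ERROR","object":{...}} documents.
type watchEvent struct {
	Type   string `json:"type"`
	Object struct {
		Metadata struct {
			Name            string `json:"name"`
			ResourceVersion string `json:"resourceVersion"`
		} `json:"metadata"`
	} `json:"object"`
}

func main() {
	const (
		base  = "https://127.0.0.1:8443"      // placeholder
		token = "REPLACE_WITH_BEARER_TOKEN"   // placeholder
	)
	client := &http.Client{Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}}} // demo only

	rv := "0" // starting point; the logged watches resume from concrete versions such as 4 or 10942
	for {
		// timeoutSeconds bounds the watch server-side, like the ?timeoutSeconds=... values above.
		url := fmt.Sprintf("%s/api/v1/watch/configmaps?resourceVersion=%s&timeoutSeconds=60", base, rv)
		req, err := http.NewRequest("GET", url, nil)
		if err != nil {
			panic(err)
		}
		req.Header.Set("Authorization", "Bearer "+token)

		resp, err := client.Do(req)
		if err != nil {
			fmt.Println("watch failed:", err)
			time.Sleep(time.Second)
			continue
		}
		dec := json.NewDecoder(resp.Body)
		for {
			var ev watchEvent
			if err := dec.Decode(&ev); err != nil {
				break // server closed the stream; re-watch from the last version seen
			}
			if v := ev.Object.Metadata.ResourceVersion; v != "" {
				rv = v
			}
			fmt.Printf("%s %s (rv %s)\n", ev.Type, ev.Object.Metadata.Name, rv)
		}
		resp.Body.Close()
	}
}

Resuming from the last observed resourceVersion is what lets the client pick up exactly where the previous watch ended instead of re-listing everything.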
2017-01-25T05:15:25.350382239-05:00 AUDIT: id="e591428a-8ecc-433e-a8fe-9885edef0817" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:15:25.351429 4678 audit.go:45] 2017-01-25T05:15:25.351414922-05:00 AUDIT: id="e591428a-8ecc-433e-a8fe-9885edef0817" response="200" I0125 05:15:25.351497 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.338581ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:25.684649 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:26.068103 4678 audit.go:125] 2017-01-25T05:15:26.068064786-05:00 AUDIT: id="01020aa4-5f04-4e63-9baa-b246923503f9" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:26.069408 4678 audit.go:45] 2017-01-25T05:15:26.069393072-05:00 AUDIT: id="01020aa4-5f04-4e63-9baa-b246923503f9" response="200" I0125 05:15:26.069482 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.942801ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:26.643531 4678 panics.go:76] GET /apis/extensions/v1beta1/watch/deployments?resourceVersion=4&timeoutSeconds=432: (7m12.000985206s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:26.643796 4678 reflector.go:392] pkg/controller/informers/factory.go:89: Watch close - *extensions.Deployment total 0 items received I0125 05:15:26.644404 4678 audit.go:125] 2017-01-25T05:15:26.64437455-05:00 AUDIT: id="60609992-fe74-4616-aee5-85f38bc3c9e8" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/watch/deployments?resourceVersion=4&timeoutSeconds=343" I0125 05:15:26.644766 4678 audit.go:45] 2017-01-25T05:15:26.644756163-05:00 AUDIT: id="60609992-fe74-4616-aee5-85f38bc3c9e8" response="200" I0125 05:15:27.068219 4678 audit.go:125] 2017-01-25T05:15:27.068155619-05:00 AUDIT: id="61da53a3-a1d1-47bf-8930-8822cdc423a4" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:27.069538 4678 audit.go:45] 2017-01-25T05:15:27.069523268-05:00 AUDIT: id="61da53a3-a1d1-47bf-8930-8822cdc423a4" response="200" I0125 05:15:27.069622 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.116004ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:27.684624 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:28.068148 4678 audit.go:125] 2017-01-25T05:15:28.068108061-05:00 AUDIT: id="565bea49-db52-4ca8-b373-c2ce85c58f17" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:28.069404 4678 audit.go:45] 2017-01-25T05:15:28.069392432-05:00 AUDIT: id="565bea49-db52-4ca8-b373-c2ce85c58f17" response="200" I0125 05:15:28.069483 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.951377ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:28.187289 4678 reflector.go:273] github.com/openshift/origin/pkg/user/cache/groups.go:51: forcing resync I0125 05:15:28.414783 4678 audit.go:125] 2017-01-25T05:15:28.414752567-05:00 AUDIT: id="47ecbb31-4637-436b-be9c-82f57447f01a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:28.415145 4678 audit.go:45] 2017-01-25T05:15:28.41513688-05:00 AUDIT: id="47ecbb31-4637-436b-be9c-82f57447f01a" response="200" I0125 05:15:28.415477 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (914.734µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:28.749463 4678 audit.go:125] 2017-01-25T05:15:28.749430782-05:00 AUDIT: id="101d8aea-e3b9-4e10-aa84-289e3f4263b3" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:15:28.749898 4678 audit.go:45] 2017-01-25T05:15:28.749888111-05:00 AUDIT: id="101d8aea-e3b9-4e10-aa84-289e3f4263b3" response="200" I0125 05:15:28.750229 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (997.693µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:28.815798 4678 audit.go:125] 2017-01-25T05:15:28.815765174-05:00 AUDIT: id="0e4a38be-afff-4363-9532-b0267e861f1d" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:15:28.817925 4678 audit.go:45] 2017-01-25T05:15:28.817910105-05:00 AUDIT: id="0e4a38be-afff-4363-9532-b0267e861f1d" response="200" I0125 05:15:28.818177 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.603873ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:28.819018 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:15:28.884013 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:15:28.884028 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:15:28.961698 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:15:29.068165 4678 audit.go:125] 2017-01-25T05:15:29.068121738-05:00 AUDIT: id="b41dcadc-5e7c-418a-bd2a-3da175810be6" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:29.069550 4678 audit.go:45] 2017-01-25T05:15:29.069539419-05:00 AUDIT: id="b41dcadc-5e7c-418a-bd2a-3da175810be6" response="200" I0125 05:15:29.069616 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.097074ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:29.260979 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:29.261007 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:29.261701 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:29.261717 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:29.262032 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc4356cc2c0 -1 [] true false map[] 0xc43071a2d0 } I0125 05:15:29.262079 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:29.262294 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc434ca62c0 -1 [] true false map[] 0xc4292e0d20 } I0125 05:15:29.262316 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:29.305112 4678 audit.go:125] 2017-01-25T05:15:29.305068965-05:00 AUDIT: id="91901989-88c0-44dd-a634-458f35185161" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:15:29.306003 4678 audit.go:45] 2017-01-25T05:15:29.305992924-05:00 AUDIT: id="91901989-88c0-44dd-a634-458f35185161" response="200" I0125 05:15:29.306080 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.869639ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:29.306351 4678 controller.go:106] Found 0 cronjobs I0125 05:15:29.308046 4678 audit.go:125] 2017-01-25T05:15:29.308026649-05:00 AUDIT: id="cc7feb36-c607-4408-9b93-2072e97e8959" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:15:29.308822 4678 audit.go:45] 2017-01-25T05:15:29.308812436-05:00 AUDIT: id="cc7feb36-c607-4408-9b93-2072e97e8959" response="200" I0125 05:15:29.308885 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.326198ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:29.309097 4678 controller.go:114] Found 0 jobs I0125 05:15:29.309105 4678 controller.go:117] Found 0 
groups I0125 05:15:29.684641 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:30.068078 4678 audit.go:125] 2017-01-25T05:15:30.068032797-05:00 AUDIT: id="8f84880d-1b25-4b9d-9165-56e6e43be861" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:30.069361 4678 audit.go:45] 2017-01-25T05:15:30.06934995-05:00 AUDIT: id="8f84880d-1b25-4b9d-9165-56e6e43be861" response="200" I0125 05:15:30.069437 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.965293ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:31.068419 4678 audit.go:125] 2017-01-25T05:15:31.068374282-05:00 AUDIT: id="343270c1-f027-407c-8837-1430b12a79e1" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:31.069897 4678 audit.go:45] 2017-01-25T05:15:31.069884861-05:00 AUDIT: id="343270c1-f027-407c-8837-1430b12a79e1" response="200" I0125 05:15:31.069988 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.366273ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:31.684620 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:31.964850 4678 audit.go:125] 2017-01-25T05:15:31.964807792-05:00 AUDIT: id="ecebe528-a808-4f0c-8536-907481149e8b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:15:31.965938 4678 audit.go:45] 2017-01-25T05:15:31.96592657-05:00 AUDIT: id="ecebe528-a808-4f0c-8536-907481149e8b" response="200" I0125 05:15:31.966025 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.435425ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:32.068501 4678 audit.go:125] 2017-01-25T05:15:32.06845404-05:00 AUDIT: id="19494e02-462b-4a67-a759-acb45b8eaad5" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:32.069958 4678 audit.go:45] 2017-01-25T05:15:32.069946795-05:00 AUDIT: id="19494e02-462b-4a67-a759-acb45b8eaad5" response="200" I0125 05:15:32.070038 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.465904ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:32.250423 4678 reflector.go:273] github.com/openshift/origin/pkg/project/auth/cache.go:201: forcing resync I0125 05:15:32.468921 4678 panics.go:76] GET /api/v1/watch/services?resourceVersion=10097&timeoutSeconds=519: (8m39.002348913s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) 
kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:32.469190 4678 reflector.go:392] pkg/controller/endpoint/endpoints_controller.go:160: Watch close - *api.Service total 11 items received I0125 05:15:32.471592 4678 audit.go:125] 2017-01-25T05:15:32.471549904-05:00 AUDIT: id="1061a2c8-e9d4-42dc-ae59-eaaffd50ee9f" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="" uri="/api/v1/watch/services?resourceVersion=10976&timeoutSeconds=499" I0125 05:15:32.472009 4678 audit.go:45] 2017-01-25T05:15:32.471999415-05:00 AUDIT: id="1061a2c8-e9d4-42dc-ae59-eaaffd50ee9f" response="200" I0125 05:15:32.485772 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:15:32.486298 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:32.498874 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:32.571184 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:15:32.571211 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:32.904336 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:15:32.904725 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:32.905605 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:32.956511 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:15:32.956535 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:32.967135 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:32.967155 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:32.967852 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:32.967868 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:32.967954 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:32 GMT]] 0xc42f286880 0 [] true false map[] 0xc4339b24b0 } I0125 05:15:32.968005 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:32.968739 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:32 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc435da29c0 0 [] true false map[] 0xc43ae3b590 } I0125 05:15:32.968789 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:33.068283 4678 audit.go:125] 2017-01-25T05:15:33.068233662-05:00 AUDIT: id="5c3e808c-bab8-4d5c-911a-25a3eb7e8b6a" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:33.069603 4678 audit.go:45] 2017-01-25T05:15:33.069591714-05:00 AUDIT: id="5c3e808c-bab8-4d5c-911a-25a3eb7e8b6a" response="200" I0125 05:15:33.069675 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.109237ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:33.339607 4678 panics.go:76] GET /api/v1/watch/services?resourceVersion=10521&timeoutSeconds=344: (5m44.000999529s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:33.339896 4678 reflector.go:392] pkg/kubelet/kubelet.go:378: Watch close - *api.Service total 7 items received I0125 05:15:33.340519 4678 audit.go:125] 2017-01-25T05:15:33.340490814-05:00 AUDIT: id="1f68df36-b033-40eb-87ce-97b73eca6ad9" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/watch/services?resourceVersion=10976&timeoutSeconds=391" I0125 05:15:33.340981 4678 audit.go:45] 2017-01-25T05:15:33.340971316-05:00 AUDIT: id="1f68df36-b033-40eb-87ce-97b73eca6ad9" response="200" I0125 05:15:33.416629 4678 audit.go:125] 2017-01-25T05:15:33.416593557-05:00 AUDIT: id="11bf88df-81ec-450c-9c39-486ca44c1ce9" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:33.417058 4678 audit.go:45] 2017-01-25T05:15:33.417048693-05:00 AUDIT: id="11bf88df-81ec-450c-9c39-486ca44c1ce9" response="200" I0125 05:15:33.417419 4678 panics.go:76] GET 
/api/v1/nodes?resourceVersion=0: (1.049983ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:33.417743 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:15:33.573539 4678 reflector.go:273] pkg/controller/endpoint/endpoints_controller.go:160: forcing resync I0125 05:15:33.578256 4678 audit.go:125] 2017-01-25T05:15:33.57821014-05:00 AUDIT: id="85364c06-dbb5-4e4c-b3be-ca65fde34d58" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:15:33.579214 4678 audit.go:125] 2017-01-25T05:15:33.579170221-05:00 AUDIT: id="6aedf16e-13e9-42b2-8815-da16d479327e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/docker-registry" I0125 05:15:33.579557 4678 audit.go:125] 2017-01-25T05:15:33.579516967-05:00 AUDIT: id="17ca8726-5a27-4537-b973-35f7d64e3b3e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:15:33.579632 4678 audit.go:125] 2017-01-25T05:15:33.579604213-05:00 AUDIT: id="2f06297d-00c0-4df4-9ca3-2e89fd1ecaf9" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/router" I0125 05:15:33.579559 4678 audit.go:125] 2017-01-25T05:15:33.579532424-05:00 AUDIT: id="926994f7-77b7-4b44-a3fb-22dc79f053a9" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:15:33.581028 4678 audit.go:45] 2017-01-25T05:15:33.581014277-05:00 AUDIT: id="85364c06-dbb5-4e4c-b3be-ca65fde34d58" response="200" I0125 05:15:33.581126 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (6.507783ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:33.581212 4678 audit.go:45] 2017-01-25T05:15:33.581185036-05:00 AUDIT: id="17ca8726-5a27-4537-b973-35f7d64e3b3e" response="200" I0125 05:15:33.581263 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (6.118282ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:33.581343 4678 audit.go:45] 2017-01-25T05:15:33.581330019-05:00 AUDIT: id="6aedf16e-13e9-42b2-8815-da16d479327e" response="200" I0125 05:15:33.581393 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/docker-registry: (5.853231ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:33.581546 4678 audit.go:45] 2017-01-25T05:15:33.581534333-05:00 
AUDIT: id="2f06297d-00c0-4df4-9ca3-2e89fd1ecaf9" response="200" I0125 05:15:33.581595 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/router: (7.128624ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:33.581614 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (7.66263ms) I0125 05:15:33.581641 4678 endpoints_controller.go:334] Finished syncing service "default/kubernetes" endpoints. (1.768µs) I0125 05:15:33.581754 4678 audit.go:45] 2017-01-25T05:15:33.581740759-05:00 AUDIT: id="926994f7-77b7-4b44-a3fb-22dc79f053a9" response="200" I0125 05:15:33.581798 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (6.748204ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:33.581915 4678 endpoints_controller.go:334] Finished syncing service "default/docker-registry" endpoints. (7.86489ms) I0125 05:15:33.582008 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:15:33.582060 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (8.351519ms) I0125 05:15:33.582008 4678 endpoints_controller.go:334] Finished syncing service "default/router" endpoints. (8.354607ms) I0125 05:15:33.583931 4678 audit.go:125] 2017-01-25T05:15:33.583907316-05:00 AUDIT: id="386e53c8-ed63-425a-a4ae-17f7572acee9" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:15:33.584442 4678 audit.go:45] 2017-01-25T05:15:33.584431844-05:00 AUDIT: id="386e53c8-ed63-425a-a4ae-17f7572acee9" response="200" I0125 05:15:33.584483 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (2.077982ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:15:33.584682 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(10.794543ms) I0125 05:15:33.651865 4678 reflector.go:273] github.com/openshift/origin/pkg/service/controller/servingcert/secret_creating_controller.go:118: forcing resync I0125 05:15:33.651938 4678 secret_creating_controller.go:103] Updating service docker-registry I0125 05:15:33.651951 4678 secret_creating_controller.go:103] Updating service kubernetes I0125 05:15:33.651956 4678 secret_creating_controller.go:103] Updating service router I0125 05:15:33.651962 4678 secret_creating_controller.go:103] Updating service postgresql-master I0125 05:15:33.651968 4678 secret_creating_controller.go:103] Updating service postgresql-slave I0125 05:15:33.651973 4678 secret_creating_controller.go:103] Updating service postgresql-helper I0125 05:15:33.652072 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:304: forcing resync I0125 05:15:33.652073 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:210: forcing resync I0125 05:15:33.652092 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:352: forcing resync I0125 05:15:33.652103 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/nodejs I0125 05:15:33.652104 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:90: forcing resync I0125 05:15:33.652115 4678 reflector.go:273] github.com/openshift/origin/pkg/build/controller/factory/factory.go:207: forcing resync I0125 05:15:33.652137 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/wildfly I0125 05:15:33.652181 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/mariadb I0125 05:15:33.652212 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/jenkins I0125 05:15:33.652241 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/ruby I0125 05:15:33.652271 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/php I0125 05:15:33.652299 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/redis I0125 05:15:33.652326 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/mongodb I0125 05:15:33.652364 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/perl I0125 05:15:33.652396 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/python I0125 05:15:33.652425 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/mysql I0125 05:15:33.652458 4678 image_change_controller.go:37] Build image change controller detected ImageStream change 172.30.17.116:5000/openshift/postgresql I0125 05:15:33.673895 4678 reflector.go:273] pkg/controller/podautoscaler/horizontal.go:133: forcing resync I0125 05:15:33.684597 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:33.757806 4678 reflector.go:273] pkg/controller/disruption/disruption.go:283: forcing resync I0125 05:15:33.757808 4678 reflector.go:273] 
pkg/controller/disruption/disruption.go:281: forcing resync I0125 05:15:33.757821 4678 reflector.go:273] pkg/controller/disruption/disruption.go:284: forcing resync I0125 05:15:33.785059 4678 reflector.go:273] github.com/openshift/origin/pkg/project/controller/factory.go:36: forcing resync I0125 05:15:33.815460 4678 reflector.go:273] pkg/controller/disruption/disruption.go:285: forcing resync I0125 05:15:33.858494 4678 reflector.go:273] pkg/controller/petset/pet_set.go:148: forcing resync I0125 05:15:33.989697 4678 reflector.go:273] pkg/controller/informers/factory.go:89: forcing resync I0125 05:15:34.009756 4678 gc_controller.go:175] GC'ing orphaned I0125 05:15:34.009767 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. I0125 05:15:34.068148 4678 audit.go:125] 2017-01-25T05:15:34.068108568-05:00 AUDIT: id="fd380707-3732-4791-9ea0-4a9468051170" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:34.069502 4678 audit.go:45] 2017-01-25T05:15:34.069490518-05:00 AUDIT: id="fd380707-3732-4791-9ea0-4a9468051170" response="200" I0125 05:15:34.069588 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.078276ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:34.134707 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:15:34.134779 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:15:34.134805 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:34.134825 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:34.134851 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:15:34.134858 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:34.134865 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:34.134707 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:15:34.134929 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to 
"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:34.134942 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:15:34.134950 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:15:34.134956 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:15:34.134969 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:15:34.134997 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:15:34.135005 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:15:34.135031 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:15:34.135060 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:15:34.135066 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:34.135088 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:34.135000 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:34.135102 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:15:34.135102 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:34.135118 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:34.135130 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:15:34.135135 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:15:34.135140 4678 
pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:15:35.067988 4678 audit.go:125] 2017-01-25T05:15:35.067947874-05:00 AUDIT: id="b8adb9eb-2eb5-44e4-ba6b-6ddea7a91bb8" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:35.069272 4678 audit.go:45] 2017-01-25T05:15:35.069261138-05:00 AUDIT: id="b8adb9eb-2eb5-44e4-ba6b-6ddea7a91bb8" response="200" I0125 05:15:35.069344 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.85508ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:35.356078 4678 audit.go:125] 2017-01-25T05:15:35.356041619-05:00 AUDIT: id="ca56215a-d6c8-4848-9d4f-3628106ecab1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:15:35.357002 4678 audit.go:45] 2017-01-25T05:15:35.356991173-05:00 AUDIT: id="ca56215a-d6c8-4848-9d4f-3628106ecab1" response="200" I0125 05:15:35.357060 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.237663ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:35.684604 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:36.068054 4678 audit.go:125] 2017-01-25T05:15:36.068008638-05:00 AUDIT: id="3e703fe0-4704-4036-8823-9a4a5307e4a9" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:36.069368 4678 audit.go:45] 2017-01-25T05:15:36.06935534-05:00 AUDIT: id="3e703fe0-4704-4036-8823-9a4a5307e4a9" response="200" I0125 05:15:36.069434 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.933946ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:36.796995 4678 worker.go:45] 0 Health Check Listeners I0125 05:15:36.797025 4678 worker.go:46] 4 Services registered for health checking I0125 05:15:36.797030 4678 worker.go:50] Service default/docker-registry has 1 local endpoints I0125 05:15:36.797035 4678 worker.go:50] Service default/router has 1 local endpoints I0125 05:15:36.797040 4678 worker.go:50] Service extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper has 1 local endpoints I0125 05:15:36.797045 4678 worker.go:50] Service extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master has 1 local endpoints I0125 05:15:37.068124 4678 audit.go:125] 2017-01-25T05:15:37.068068557-05:00 AUDIT: id="da6b5da8-ae65-415d-89b8-a4cbe7e34f97" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:37.069477 4678 
audit.go:45] 2017-01-25T05:15:37.06946642-05:00 AUDIT: id="da6b5da8-ae65-415d-89b8-a4cbe7e34f97" response="200" I0125 05:15:37.069544 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.050999ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:37.072631 4678 proxier.go:804] Syncing iptables rules I0125 05:15:37.072646 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:15:37.086998 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:15:37.097187 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:15:37.107194 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:15:37.116977 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:15:37.127743 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:15:37.138124 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:15:37.147796 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:15:37.158014 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:15:37.169081 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m 
comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE 
--rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:15:37.169111 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:15:37.180891 4678 proxier.go:797] syncProxyRules took 108.258266ms I0125 05:15:37.180920 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-CONTAINER -t nat] I0125 05:15:37.191187 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-CONTAINER] I0125 05:15:37.200502 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-HOST -t nat] I0125 05:15:37.209520 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-HOST] I0125 05:15:37.218776 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-CONTAINER -t nat] I0125 05:15:37.228246 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-CONTAINER] I0125 05:15:37.237652 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-HOST -t nat] I0125 05:15:37.247245 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-HOST] I0125 05:15:37.256669 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-NON-LOCAL -t filter] I0125 05:15:37.256763 4678 panics.go:76] GET /api/v1/watch/namespaces?resourceVersion=10540&timeoutSeconds=305: (5m5.001883148s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:37.261582 4678 reflector.go:392] github.com/openshift/origin/pkg/security/controller/factory.go:40: Watch close - *api.Namespace total 10 items received I0125 05:15:37.262655 4678 audit.go:125] 2017-01-25T05:15:37.262614939-05:00 AUDIT: id="4b8df67e-e07a-41f7-a429-e8622e36663c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/namespaces?resourceVersion=10903&timeoutSeconds=445" I0125 05:15:37.263446 4678 audit.go:45] 2017-01-25T05:15:37.263432566-05:00 AUDIT: id="4b8df67e-e07a-41f7-a429-e8622e36663c" response="200" I0125 05:15:37.267313 4678 iptables.go:362] running iptables -C [INPUT -t filter -m comment --comment Ensure that non-local NodePort traffic can flow -j KUBE-NODEPORT-NON-LOCAL] I0125 05:15:37.684600 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:38.068187 4678 audit.go:125] 2017-01-25T05:15:38.068141103-05:00 AUDIT: id="684cce8f-c7e7-4155-9c9f-3c1a3f0ad5a3" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:38.069654 4678 audit.go:45] 2017-01-25T05:15:38.069642937-05:00 AUDIT: id="684cce8f-c7e7-4155-9c9f-3c1a3f0ad5a3" response="200" I0125 05:15:38.069742 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.21616ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:38.144742 4678 factory.go:104] Error trying to work out if we can handle /system.slice/-.mount: invalid container name I0125 05:15:38.144764 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/-.mount" I0125 05:15:38.144773 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/-.mount", but ignoring. I0125 05:15:38.144781 4678 manager.go:867] ignoring container "/system.slice/-.mount" I0125 05:15:38.144807 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-daedc0da\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:15:38.144812 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:15:38.144820 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. I0125 05:15:38.144831 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-daedc0da\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:15:38.144841 4678 factory.go:104] Error trying to work out if we can handle /system.slice/run-user-1000.mount: invalid container name I0125 05:15:38.144844 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/run-user-1000.mount" I0125 05:15:38.144849 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/run-user-1000.mount", but ignoring. I0125 05:15:38.144854 4678 manager.go:867] ignoring container "/system.slice/run-user-1000.mount" I0125 05:15:38.144880 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-e932e61a\x2de2d9\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-registry\x2dtoken\x2dvjbst.mount: invalid container name I0125 05:15:38.144883 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount" I0125 05:15:38.144891 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount", but ignoring. 
I0125 05:15:38.144900 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount" I0125 05:15:38.144936 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-b76687cc\x2de2e6\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-default\x2dtoken\x2d0g2nw.mount: invalid container name I0125 05:15:38.144939 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:15:38.144946 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount", but ignoring. I0125 05:15:38.144955 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-b76687cc\\x2de2e6\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-default\\x2dtoken\\x2d0g2nw.mount" I0125 05:15:38.144967 4678 factory.go:104] Error trying to work out if we can handle /system.slice/sys-kernel-debug.mount: invalid container name I0125 05:15:38.144970 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/sys-kernel-debug.mount" I0125 05:15:38.144974 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/sys-kernel-debug.mount", but ignoring. I0125 05:15:38.144980 4678 manager.go:867] ignoring container "/system.slice/sys-kernel-debug.mount" I0125 05:15:38.144986 4678 factory.go:104] Error trying to work out if we can handle /system.slice/dev-hugepages.mount: invalid container name I0125 05:15:38.144989 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/dev-hugepages.mount" I0125 05:15:38.144993 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/dev-hugepages.mount", but ignoring. I0125 05:15:38.144999 4678 manager.go:867] ignoring container "/system.slice/dev-hugepages.mount" I0125 05:15:38.145008 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir.mount: invalid container name I0125 05:15:38.145011 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount" I0125 05:15:38.145015 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount", but ignoring. 
I0125 05:15:38.145020 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount" I0125 05:15:38.145046 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-04c98b55\x2de2da\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-router\x2dtoken\x2ds79l8.mount: invalid container name I0125 05:15:38.145049 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount" I0125 05:15:38.145059 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount", but ignoring. I0125 05:15:38.145069 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount" I0125 05:15:38.145080 4678 factory.go:104] Error trying to work out if we can handle /system.slice/sys-kernel-config.mount: invalid container name I0125 05:15:38.145083 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/sys-kernel-config.mount" I0125 05:15:38.145089 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/sys-kernel-config.mount", but ignoring. I0125 05:15:38.145094 4678 manager.go:867] ignoring container "/system.slice/sys-kernel-config.mount" I0125 05:15:38.145101 4678 factory.go:104] Error trying to work out if we can handle /system.slice/dev-mqueue.mount: invalid container name I0125 05:15:38.145104 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/dev-mqueue.mount" I0125 05:15:38.145108 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/dev-mqueue.mount", but ignoring. I0125 05:15:38.145113 4678 manager.go:867] ignoring container "/system.slice/dev-mqueue.mount" I0125 05:15:38.145138 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-04c98b55\x2de2da\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-server\x2dcertificate.mount: invalid container name I0125 05:15:38.145141 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount" I0125 05:15:38.145148 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount", but ignoring. 
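The factory.go / manager.go entries above and immediately below show the node's container manager sweeping the systemd cgroup hierarchy and deciding, per entry, which factory (docker vs. systemd) should own it; mount units are recognised by the systemd factory but deliberately skipped, hence the repeated "can handle ..., but ignoring" lines. The following Go sketch is a minimal, hypothetical reconstruction of that selection loop only — the ContainerFactory interface, the two factory types, and pickFactory are illustrative names invented here, not cAdvisor's real API.

```go
package main

import (
	"fmt"
	"strings"
)

// ContainerFactory is a hypothetical, simplified stand-in for the factory
// abstraction behind the log lines: each factory reports whether it can
// handle a cgroup path and whether it actually wants to track it.
type ContainerFactory interface {
	Name() string
	CanHandleAndAccept(name string) (canHandle bool, accept bool, err error)
}

type dockerFactory struct{}

func (dockerFactory) Name() string { return "docker" }
func (dockerFactory) CanHandleAndAccept(name string) (bool, bool, error) {
	// Docker-managed containers live under /docker/<id>; systemd mount units
	// do not, so the factory rejects them ("unable to handle container").
	if !strings.HasPrefix(name, "/docker/") {
		return false, false, fmt.Errorf("invalid container name")
	}
	return true, true, nil
}

type systemdFactory struct{}

func (systemdFactory) Name() string { return "systemd" }
func (systemdFactory) CanHandleAndAccept(name string) (bool, bool, error) {
	// The systemd factory recognises *.mount units but declines to track
	// them, which produces the "can handle ..., but ignoring" lines.
	if strings.HasSuffix(name, ".mount") {
		return true, false, nil
	}
	return false, false, nil
}

// pickFactory walks the registered factories in order and returns the first
// one that both handles and accepts the container; otherwise the container
// is ignored, mirroring the error -> "but ignoring" -> "ignoring container"
// sequence in the log.
func pickFactory(factories []ContainerFactory, name string) ContainerFactory {
	for _, f := range factories {
		canHandle, accept, err := f.CanHandleAndAccept(name)
		if err != nil {
			fmt.Printf("Error trying to work out if we can handle %s: %v\n", name, err)
		}
		if canHandle && accept {
			return f
		}
		if canHandle {
			fmt.Printf("Factory %q can handle container %q, but ignoring.\n", f.Name(), name)
		} else {
			fmt.Printf("Factory %q was unable to handle container %q\n", f.Name(), name)
		}
	}
	fmt.Printf("ignoring container %q\n", name)
	return nil
}

func main() {
	factories := []ContainerFactory{dockerFactory{}, systemdFactory{}}
	pickFactory(factories, "/system.slice/dev-hugepages.mount")
}
```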
I0125 05:15:38.145157 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount" I0125 05:15:38.418690 4678 audit.go:125] 2017-01-25T05:15:38.418652345-05:00 AUDIT: id="053f07d4-4ed8-4850-ab70-8dbca2d6a758" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:38.419072 4678 audit.go:45] 2017-01-25T05:15:38.419063575-05:00 AUDIT: id="053f07d4-4ed8-4850-ab70-8dbca2d6a758" response="200" I0125 05:15:38.419405 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (966.236µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:38.819579 4678 audit.go:125] 2017-01-25T05:15:38.819542363-05:00 AUDIT: id="78aca411-e242-4451-9d6c-264378e51523" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:15:38.819948 4678 audit.go:45] 2017-01-25T05:15:38.819936268-05:00 AUDIT: id="78aca411-e242-4451-9d6c-264378e51523" response="200" I0125 05:15:38.820263 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (902.829µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:38.885909 4678 audit.go:125] 2017-01-25T05:15:38.885878498-05:00 AUDIT: id="69305a29-7507-47bc-a957-e15cb58fb324" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:15:38.887981 4678 audit.go:45] 2017-01-25T05:15:38.88796667-05:00 AUDIT: id="69305a29-7507-47bc-a957-e15cb58fb324" response="200" I0125 05:15:38.888506 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.825099ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:38.889159 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:15:38.970521 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:15:38.970538 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:15:39.065350 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:15:39.068068 4678 audit.go:125] 2017-01-25T05:15:39.068011862-05:00 AUDIT: id="65548128-e620-4e22-80c9-dc1a6fc4a992" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:39.069402 4678 audit.go:45] 2017-01-25T05:15:39.069390973-05:00 AUDIT: id="65548128-e620-4e22-80c9-dc1a6fc4a992" response="200" I0125 05:15:39.069467 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.997065ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:39.260984 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:39.261014 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:39.261689 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:39.261705 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:39.262048 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc431540200 -1 [] true false map[] 0xc431347a40 } I0125 05:15:39.262101 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:39.262265 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42e2781a0 -1 [] true false map[] 0xc42aa5be00 } I0125 05:15:39.262290 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:39.311559 4678 audit.go:125] 2017-01-25T05:15:39.311525118-05:00 AUDIT: id="0780b1fb-ab66-4ec8-a2af-e86a6a7d7984" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:15:39.312384 4678 audit.go:45] 2017-01-25T05:15:39.312373935-05:00 AUDIT: id="0780b1fb-ab66-4ec8-a2af-e86a6a7d7984" response="200" I0125 05:15:39.312455 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.78454ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:39.312699 4678 controller.go:106] Found 0 cronjobs I0125 05:15:39.314347 4678 audit.go:125] 2017-01-25T05:15:39.314319624-05:00 AUDIT: id="1a3233e2-3f27-4f4f-b5df-14ce35839f31" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:15:39.315081 4678 audit.go:45] 2017-01-25T05:15:39.31506794-05:00 AUDIT: id="1a3233e2-3f27-4f4f-b5df-14ce35839f31" response="200" I0125 05:15:39.315153 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.284649ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:39.315496 4678 controller.go:114] Found 0 jobs I0125 05:15:39.315507 4678 controller.go:117] Found 0 
groups I0125 05:15:39.684607 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:40.068070 4678 audit.go:125] 2017-01-25T05:15:40.068033687-05:00 AUDIT: id="8896925d-2ded-4ef7-85de-32c7566dc06b" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:40.069360 4678 audit.go:45] 2017-01-25T05:15:40.0693437-05:00 AUDIT: id="8896925d-2ded-4ef7-85de-32c7566dc06b" response="200" I0125 05:15:40.069436 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.916817ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:41.068167 4678 audit.go:125] 2017-01-25T05:15:41.068128307-05:00 AUDIT: id="ee4a4823-a19d-4d77-be0d-ab1cdc541be4" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:41.069495 4678 audit.go:45] 2017-01-25T05:15:41.069481055-05:00 AUDIT: id="ee4a4823-a19d-4d77-be0d-ab1cdc541be4" response="200" I0125 05:15:41.069563 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.038584ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:41.684598 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:15:41.684646 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:41.684753 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:41.684946 4678 status_manager.go:312] Ignoring same status for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:25 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:12:02 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.8 StartTime:2017-01-25 05:12:02 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql State:{Waiting: Running:0xc431e017e0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ImageID:docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ContainerID:docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208}]} I0125 05:15:41.685064 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod 
"postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:41.772949 4678 secret.go:179] Setting up volume default-token-0g2nw for pod b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:15:41.773631 4678 audit.go:125] 2017-01-25T05:15:41.773599415-05:00 AUDIT: id="01fc1f03-5a6d-4494-ac9a-55ae23c98fde" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:15:41.774826 4678 audit.go:45] 2017-01-25T05:15:41.774815343-05:00 AUDIT: id="01fc1f03-5a6d-4494-ac9a-55ae23c98fde" response="200" I0125 05:15:41.775070 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (1.666677ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:41.775222 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:15:41.775433 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:15:41.775584 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:15:41.967100 4678 audit.go:125] 2017-01-25T05:15:41.967067399-05:00 AUDIT: id="3f55ebe9-aea4-450c-91fb-9ab7cd7db0e0" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:15:41.968188 4678 audit.go:45] 2017-01-25T05:15:41.968177288-05:00 AUDIT: id="3f55ebe9-aea4-450c-91fb-9ab7cd7db0e0" response="200" I0125 05:15:41.968281 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.42131ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:41.985308 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:41.985820 4678 audit.go:125] 2017-01-25T05:15:41.985783719-05:00 AUDIT: id="b3fee14d-e70b-4c1e-8954-d0911a6be5c7" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:15:41.986833 4678 audit.go:45] 2017-01-25T05:15:41.986822493-05:00 AUDIT: id="b3fee14d-e70b-4c1e-8954-d0911a6be5c7" response="200" I0125 05:15:41.987034 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (1.432631ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:41.987165 4678 docker_manager.go:1938] Found pod infra container for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:41.987239 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:41.987253 4678 docker_manager.go:1999] pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql" exists as 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208 I0125 05:15:41.987357 4678 docker_manager.go:2086] Got container changes for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017:-1 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208:0]} I0125 05:15:42.068136 4678 audit.go:125] 2017-01-25T05:15:42.068089895-05:00 AUDIT: id="76054ce6-c06f-4c82-b330-0154026205bb" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:42.069445 4678 audit.go:45] 2017-01-25T05:15:42.069432389-05:00 AUDIT: id="76054ce6-c06f-4c82-b330-0154026205bb" response="200" I0125 05:15:42.069519 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.009779ms) 200 
[[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:42.112223 4678 panics.go:76] GET /api/v1/watch/secrets?fieldSelector=type%3Dkubernetes.io%2Fservice-account-token&resourceVersion=10080&timeoutSeconds=482: (8m2.001088307s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:42.112459 4678 reflector.go:392] github.com/openshift/origin/vendor/k8s.io/kubernetes/plugin/pkg/admission/serviceaccount/admission.go:119: Watch close - *api.Secret total 97 items received I0125 05:15:42.113078 4678 audit.go:125] 2017-01-25T05:15:42.113045083-05:00 AUDIT: id="3effef24-fca2-479d-9965-0cf76cd90e8f" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/secrets?fieldSelector=type%3Dkubernetes.io%2Fservice-account-token&resourceVersion=10931&timeoutSeconds=502" I0125 05:15:42.113532 4678 audit.go:45] 2017-01-25T05:15:42.11351772-05:00 AUDIT: id="3effef24-fca2-479d-9965-0cf76cd90e8f" response="200" I0125 05:15:42.485810 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:15:42.486188 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:42.498865 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:42.545931 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:15:42.545953 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:42.904433 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:15:42.904817 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:42.905485 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:42.953807 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:15:42.953832 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:42.967137 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:42.967153 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:42.967886 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Date:[Wed, 25 Jan 2017 10:15:42 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache]] 0xc431c742c0 0 [] true false map[] 0xc4330c0c30 } I0125 05:15:42.967944 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:42.968037 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:42.968055 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:42.968911 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:42 GMT]] 0xc431c94020 0 [] true false map[] 0xc42f4ca000 } I0125 05:15:42.968945 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:43.067996 4678 audit.go:125] 2017-01-25T05:15:43.067957558-05:00 AUDIT: id="6e0a3037-6872-482e-a7fc-043d5d102fac" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:43.069290 4678 audit.go:45] 2017-01-25T05:15:43.06927939-05:00 AUDIT: id="6e0a3037-6872-482e-a7fc-043d5d102fac" response="200" I0125 05:15:43.069365 4678 panics.go:76] 
GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.823363ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:43.420471 4678 audit.go:125] 2017-01-25T05:15:43.420440751-05:00 AUDIT: id="1051b8ad-1440-476b-9a37-9dac31659dd1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:43.420879 4678 audit.go:45] 2017-01-25T05:15:43.420867208-05:00 AUDIT: id="1051b8ad-1440-476b-9a37-9dac31659dd1" response="200" I0125 05:15:43.421187 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (956.067µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:43.421404 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:15:43.684586 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:44.068135 4678 audit.go:125] 2017-01-25T05:15:44.0680789-05:00 AUDIT: id="76401595-7a7e-488f-8748-6f0b8a33ae18" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:44.069437 4678 audit.go:45] 2017-01-25T05:15:44.069425697-05:00 AUDIT: id="76401595-7a7e-488f-8748-6f0b8a33ae18" response="200" I0125 05:15:44.069512 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.020251ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:45.068038 4678 audit.go:125] 2017-01-25T05:15:45.067997831-05:00 AUDIT: id="746ae231-8e4a-45e5-bfec-19ea12fd60f9" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:45.069293 4678 audit.go:45] 2017-01-25T05:15:45.069282476-05:00 AUDIT: id="746ae231-8e4a-45e5-bfec-19ea12fd60f9" response="200" I0125 05:15:45.069361 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.858348ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:45.361995 4678 audit.go:125] 2017-01-25T05:15:45.361959269-05:00 AUDIT: id="e625c1d2-dec7-4d21-9300-e1e32d87f7cc" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:15:45.362957 4678 audit.go:45] 2017-01-25T05:15:45.362945813-05:00 AUDIT: id="e625c1d2-dec7-4d21-9300-e1e32d87f7cc" response="200" I0125 05:15:45.363017 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.26413ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:45.684597 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:46.067919 4678 audit.go:125] 2017-01-25T05:15:46.067882677-05:00 AUDIT: id="0a439e66-542a-4b04-becb-4a280b8169d3" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" 
asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:46.069235 4678 audit.go:45] 2017-01-25T05:15:46.069224242-05:00 AUDIT: id="0a439e66-542a-4b04-becb-4a280b8169d3" response="200" I0125 05:15:46.069303 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.846891ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:47.068149 4678 audit.go:125] 2017-01-25T05:15:47.068110097-05:00 AUDIT: id="9fbed29f-c0e1-4ece-9ec8-c1576331d96c" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:47.069478 4678 audit.go:45] 2017-01-25T05:15:47.069466751-05:00 AUDIT: id="9fbed29f-c0e1-4ece-9ec8-c1576331d96c" response="200" I0125 05:15:47.069557 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.016206ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:47.684588 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:48.067981 4678 audit.go:125] 2017-01-25T05:15:48.067945444-05:00 AUDIT: id="fd1a8e0b-129a-4e7b-9ef1-db40cc620614" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:48.069172 4678 audit.go:45] 2017-01-25T05:15:48.069160871-05:00 AUDIT: id="fd1a8e0b-129a-4e7b-9ef1-db40cc620614" response="200" I0125 05:15:48.069275 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.782485ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:48.422275 4678 audit.go:125] 2017-01-25T05:15:48.422246094-05:00 AUDIT: id="329cad7c-c88d-4d04-9c50-13250a446386" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:48.422679 4678 audit.go:45] 2017-01-25T05:15:48.422666663-05:00 AUDIT: id="329cad7c-c88d-4d04-9c50-13250a446386" response="200" I0125 05:15:48.422989 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (950.609µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:48.889603 4678 audit.go:125] 2017-01-25T05:15:48.889570686-05:00 AUDIT: id="29024934-caee-4789-8628-559f4b6f7b81" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:15:48.890018 4678 audit.go:45] 2017-01-25T05:15:48.890004753-05:00 AUDIT: id="29024934-caee-4789-8628-559f4b6f7b81" response="200" I0125 05:15:48.890345 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (966.105µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) 
kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:48.958342 4678 audit.go:125] 2017-01-25T05:15:48.958312612-05:00 AUDIT: id="80b17e6c-688f-4e9c-ac3f-bea470765523" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:15:48.960358 4678 audit.go:45] 2017-01-25T05:15:48.960343701-05:00 AUDIT: id="80b17e6c-688f-4e9c-ac3f-bea470765523" response="200" I0125 05:15:48.961010 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.902546ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:48.961441 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:15:49.068180 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:15:49.068217 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:15:49.068304 4678 audit.go:125] 2017-01-25T05:15:49.068255278-05:00 AUDIT: id="7f10c18b-071a-40c4-a7da-14f46f3b7c2c" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:49.069869 4678 audit.go:45] 2017-01-25T05:15:49.069852667-05:00 AUDIT: id="7f10c18b-071a-40c4-a7da-14f46f3b7c2c" response="200" I0125 05:15:49.069958 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.480272ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:49.137258 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:15:49.137297 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:15:49.137345 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:15:49.137380 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:49.137413 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:49.137425 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:15:49.137433 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:49.137444 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to 
"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:49.137517 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:49.137527 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:15:49.137535 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:15:49.137541 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:15:49.137586 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:15:49.137595 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:15:49.137635 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:15:49.137643 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:15:49.137662 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:49.137677 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:49.137692 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:15:49.137734 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:15:49.137757 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:15:49.137765 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:15:49.137782 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:15:49.137793 4678 pv_controller.go:482] synchronizing 
PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:15:49.137799 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:15:49.137807 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:15:49.186459 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:15:49.260997 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:49.261023 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:49.261707 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:49.261724 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:49.262119 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc42f764180 -1 [] true false map[] 0xc42ee121e0 } I0125 05:15:49.262170 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:49.262335 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc42c6f1aa0 -1 [] true false map[] 0xc42c0afb30 } I0125 05:15:49.262358 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:49.317959 4678 audit.go:125] 2017-01-25T05:15:49.317929499-05:00 AUDIT: id="12bde326-ee7b-4e64-badb-c44d83583cdc" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:15:49.318799 4678 audit.go:45] 2017-01-25T05:15:49.318788802-05:00 AUDIT: id="12bde326-ee7b-4e64-badb-c44d83583cdc" response="200" I0125 05:15:49.318883 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.784171ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:49.319139 4678 controller.go:106] Found 0 cronjobs I0125 05:15:49.320646 4678 audit.go:125] 2017-01-25T05:15:49.320623281-05:00 AUDIT: id="9821c80c-39cf-4903-850d-e8f17ae8edbc" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:15:49.321311 4678 audit.go:45] 2017-01-25T05:15:49.321300869-05:00 AUDIT: id="9821c80c-39cf-4903-850d-e8f17ae8edbc" response="200" I0125 05:15:49.321372 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.039453ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:49.321585 4678 controller.go:114] Found 0 jobs I0125 05:15:49.321592 4678 controller.go:117] Found 0 groups I0125 05:15:49.684598 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:50.068069 4678 audit.go:125] 2017-01-25T05:15:50.068030022-05:00 AUDIT: id="df9783c1-514a-4384-89f6-2d2bb57b4b94" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:50.069334 4678 audit.go:45] 2017-01-25T05:15:50.069319074-05:00 AUDIT: id="df9783c1-514a-4384-89f6-2d2bb57b4b94" response="200" I0125 05:15:50.069404 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.932543ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:51.067927 4678 audit.go:125] 2017-01-25T05:15:51.067886296-05:00 AUDIT: id="b15d7d72-5de3-44f0-8c4e-2ecd602ecd27" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:51.069175 4678 audit.go:45] 2017-01-25T05:15:51.069162615-05:00 AUDIT: id="b15d7d72-5de3-44f0-8c4e-2ecd602ecd27" response="200" I0125 05:15:51.069264 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.780501ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:51.684598 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:51.969322 4678 audit.go:125] 2017-01-25T05:15:51.969290517-05:00 AUDIT: id="8e213246-71e3-41a8-831d-de4f9ab360fe" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:15:51.970336 4678 audit.go:45] 2017-01-25T05:15:51.970325484-05:00 AUDIT: id="8e213246-71e3-41a8-831d-de4f9ab360fe" response="200" I0125 05:15:51.970399 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.3319ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:52.068034 4678 audit.go:125] 2017-01-25T05:15:52.067978785-05:00 AUDIT: id="fb41c570-5d09-4bf2-86f1-df57e54a634f" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:52.069242 4678 audit.go:45] 2017-01-25T05:15:52.069231139-05:00 AUDIT: id="fb41c570-5d09-4bf2-86f1-df57e54a634f" response="200" I0125 05:15:52.069308 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.838169ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:52.485811 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:15:52.486234 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:52.498866 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } 
{POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:52.515258 4678 panics.go:76] GET /api/v1/watch/replicationcontrollers?resourceVersion=10241&timeoutSeconds=458: (7m38.002367558s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:15:52.515465 4678 reflector.go:392] pkg/controller/replication/replication_controller.go:220: Watch close - *api.ReplicationController total 88 items received I0125 05:15:52.518187 4678 audit.go:125] 2017-01-25T05:15:52.518140012-05:00 AUDIT: id="7fae9f4a-4bf0-404c-a4ef-82e3aabafb5d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="" uri="/api/v1/watch/replicationcontrollers?resourceVersion=11164&timeoutSeconds=353" I0125 05:15:52.518679 4678 audit.go:45] 2017-01-25T05:15:52.51866357-05:00 AUDIT: id="7fae9f4a-4bf0-404c-a4ef-82e3aabafb5d" response="200" I0125 05:15:52.548900 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:15:52.548925 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:15:52.557774 4678 panics.go:76] GET /api/v1/watch/namespaces?resourceVersion=10540&timeoutSeconds=350: (5m50.003877181s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:15:52.557978 4678 reflector.go:392] pkg/controller/namespace/namespace_controller.go:212: Watch close - *api.Namespace total 10 items received I0125 05:15:52.560143 4678 audit.go:125] 2017-01-25T05:15:52.560113709-05:00 AUDIT: id="3fa98a0b-5ad4-481d-98c1-5056d50fa239" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="" uri="/api/v1/watch/namespaces?resourceVersion=10903&timeoutSeconds=328" I0125 05:15:52.560579 4678 audit.go:45] 2017-01-25T05:15:52.56056922-05:00 AUDIT: id="3fa98a0b-5ad4-481d-98c1-5056d50fa239" response="200" I0125 05:15:52.904413 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:15:52.904788 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:52.905442 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 
/dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:15:52.954153 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:15:52.954179 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:15:52.967144 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:52.967173 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:52.967882 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:15:52.967899 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:52.967991 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:52 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc436587de0 0 [] true false map[] 0xc42eed0000 } I0125 05:15:52.968038 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:52.968524 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:15:52 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc4268896c0 0 [] true false map[] 0xc430c63860 } I0125 05:15:52.968563 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:15:53.068364 4678 audit.go:125] 2017-01-25T05:15:53.06831141-05:00 AUDIT: id="27daff80-2ca7-4864-b0fa-dab27bacda56" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:53.069761 4678 audit.go:45] 2017-01-25T05:15:53.069749771-05:00 AUDIT: id="27daff80-2ca7-4864-b0fa-dab27bacda56" response="200" I0125 05:15:53.069837 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.248818ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:53.424108 4678 audit.go:125] 2017-01-25T05:15:53.42407229-05:00 AUDIT: id="adc2c038-f75c-4d7e-9b18-cd59d488b5ea" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:53.424529 4678 audit.go:45] 2017-01-25T05:15:53.424519714-05:00 AUDIT: id="adc2c038-f75c-4d7e-9b18-cd59d488b5ea" response="200" I0125 05:15:53.424817 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (943.238µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:53.425085 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. 
I0125 05:15:53.619609 4678 panics.go:76] GET /api/v1/watch/replicationcontrollers?resourceVersion=10241&timeoutSeconds=465: (7m45.00088424s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:53.619816 4678 reflector.go:392] github.com/openshift/origin/pkg/quota/controller/clusterquotareconciliation/reconciliation_controller.go:120: Watch close - *api.ReplicationController total 88 items received I0125 05:15:53.620426 4678 audit.go:125] 2017-01-25T05:15:53.620386772-05:00 AUDIT: id="1b916e0b-b8f5-4b1c-8fe1-ab46019ec883" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/replicationcontrollers?resourceVersion=11164&timeoutSeconds=421" I0125 05:15:53.620840 4678 audit.go:45] 2017-01-25T05:15:53.620831147-05:00 AUDIT: id="1b916e0b-b8f5-4b1c-8fe1-ab46019ec883" response="200" I0125 05:15:53.684598 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:54.009967 4678 gc_controller.go:175] GC'ing orphaned I0125 05:15:54.009988 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. I0125 05:15:54.068139 4678 audit.go:125] 2017-01-25T05:15:54.068098183-05:00 AUDIT: id="201b611d-511f-4d8c-a393-dfa4e87cfdab" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:54.069521 4678 audit.go:45] 2017-01-25T05:15:54.069505031-05:00 AUDIT: id="201b611d-511f-4d8c-a393-dfa4e87cfdab" response="200" I0125 05:15:54.069606 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.069805ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:55.067998 4678 audit.go:125] 2017-01-25T05:15:55.067958834-05:00 AUDIT: id="1f05d6d7-11ce-49f9-9ca9-27d48bab7bfc" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:55.069257 4678 audit.go:45] 2017-01-25T05:15:55.069246412-05:00 AUDIT: id="1f05d6d7-11ce-49f9-9ca9-27d48bab7bfc" response="200" I0125 05:15:55.069329 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.782654ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:55.367851 4678 audit.go:125] 2017-01-25T05:15:55.36782116-05:00 AUDIT: id="72e870cb-a46a-439b-a698-1c37aefa14b2" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:15:55.368798 4678 audit.go:45] 2017-01-25T05:15:55.368786854-05:00 AUDIT: id="72e870cb-a46a-439b-a698-1c37aefa14b2" response="200" I0125 05:15:55.368869 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.253269ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:55.684603 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; 
postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094) I0125 05:15:55.684657 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:55.684739 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:55.684925 4678 status_manager.go:312] Ignoring same status for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:13:02 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:13:12 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 05:13:02 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.2 StartTime:2017-01-25 05:13:02 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running:0xc42a6453c0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d}]} I0125 05:15:55.685053 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:55.685996 4678 audit.go:125] 2017-01-25T05:15:55.685955949-05:00 AUDIT: id="0b22bc35-5a1f-49e5-8388-fc1eb18a167c" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim" I0125 05:15:55.687346 4678 audit.go:45] 2017-01-25T05:15:55.687331606-05:00 AUDIT: id="0b22bc35-5a1f-49e5-8388-fc1eb18a167c" response="200" I0125 05:15:55.687438 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim: (1.778407ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:55.688241 4678 audit.go:125] 2017-01-25T05:15:55.688193152-05:00 AUDIT: id="3d423d17-8535-4743-bd18-4c4b66afb51c" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:15:55.689395 4678 audit.go:45] 2017-01-25T05:15:55.689380022-05:00 AUDIT: id="3d423d17-8535-4743-bd18-4c4b66afb51c" response="200" I0125 05:15:55.689480 4678 panics.go:76] GET /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (1.518471ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:55.699467 4678 secret.go:179] Setting up volume default-token-0g2nw for pod daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 at 
/mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:15:55.699960 4678 audit.go:125] 2017-01-25T05:15:55.699934218-05:00 AUDIT: id="8641751a-c017-4062-af66-8015a024da09" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:15:55.700840 4678 audit.go:45] 2017-01-25T05:15:55.700817236-05:00 AUDIT: id="8641751a-c017-4062-af66-8015a024da09" response="200" I0125 05:15:55.701018 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (1.241697ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:55.701123 4678 secret.go:206] Received secret extended-test-postgresql-replication-1-34bbd-xd4g8/default-token-0g2nw containing (4) pieces of data, 4266 total bytes I0125 05:15:55.701304 4678 atomic_writer.go:142] pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k volume default-token-0g2nw: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:15:55.701397 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:15:55.985296 4678 volume_manager.go:365] All volumes are attached and mounted for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:55.985933 4678 audit.go:125] 2017-01-25T05:15:55.985897426-05:00 AUDIT: id="09da82d5-592c-4871-b7ea-2bb7f6611a2e" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:15:55.987122 4678 audit.go:45] 2017-01-25T05:15:55.987110966-05:00 AUDIT: id="09da82d5-592c-4871-b7ea-2bb7f6611a2e" response="200" I0125 05:15:55.987361 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (1.667386ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:55.987490 4678 docker_manager.go:1938] Found pod infra container for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:55.987542 4678 docker_manager.go:1951] Pod infra container looks good, keep it "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:15:55.987552 4678 docker_manager.go:1999] pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" container "postgresql-master" exists as ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d I0125 05:15:55.987654 4678 docker_manager.go:2086] Got container changes for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false 
InfraChanged:false InfraContainerId:e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650:-1 ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d:0]} I0125 05:15:56.068136 4678 audit.go:125] 2017-01-25T05:15:56.068099245-05:00 AUDIT: id="5b88b598-ff5e-4894-8adb-0896b5c15427" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:56.069434 4678 audit.go:45] 2017-01-25T05:15:56.069419423-05:00 AUDIT: id="5b88b598-ff5e-4894-8adb-0896b5c15427" response="200" I0125 05:15:56.069518 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.966573ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:57.068191 4678 audit.go:125] 2017-01-25T05:15:57.06815155-05:00 AUDIT: id="385cd09a-02c7-45a9-a4f4-1baa8ed202c7" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:57.069450 4678 audit.go:45] 2017-01-25T05:15:57.069439528-05:00 AUDIT: id="385cd09a-02c7-45a9-a4f4-1baa8ed202c7" response="200" I0125 05:15:57.069518 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.992045ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:57.489845 4678 panics.go:76] GET /apis/policy/v1beta1/watch/poddisruptionbudgets?resourceVersion=4&timeoutSeconds=449: (7m29.002600661s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:disruption-controller] 172.18.7.222:50846] I0125 05:15:57.490084 4678 reflector.go:392] pkg/controller/disruption/disruption.go:281: Watch close - *policy.PodDisruptionBudget total 0 items received I0125 05:15:57.492274 4678 audit.go:125] 2017-01-25T05:15:57.492236334-05:00 AUDIT: id="27f908e0-b3a5-4c04-9c22-7c084aff4b16" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:disruption-controller" as="" asgroups="" namespace="" uri="/apis/policy/v1beta1/watch/poddisruptionbudgets?resourceVersion=4&timeoutSeconds=393" I0125 05:15:57.492584 4678 audit.go:45] 2017-01-25T05:15:57.49257031-05:00 AUDIT: id="27f908e0-b3a5-4c04-9c22-7c084aff4b16" response="200" I0125 05:15:57.684625 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:15:58.068159 4678 audit.go:125] 2017-01-25T05:15:58.068120317-05:00 AUDIT: id="12764c83-dd58-4e6f-9b26-592060c19c2a" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:58.069427 4678 audit.go:45] 2017-01-25T05:15:58.069416023-05:00 AUDIT: id="12764c83-dd58-4e6f-9b26-592060c19c2a" 
response="200" I0125 05:15:58.069503 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.975133ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:58.425931 4678 audit.go:125] 2017-01-25T05:15:58.425900351-05:00 AUDIT: id="5c48ffd8-5618-4e42-87a8-34feffa65538" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:15:58.426331 4678 audit.go:45] 2017-01-25T05:15:58.426319872-05:00 AUDIT: id="5c48ffd8-5618-4e42-87a8-34feffa65538" response="200" I0125 05:15:58.426639 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (938.023µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:15:58.962084 4678 audit.go:125] 2017-01-25T05:15:58.962038333-05:00 AUDIT: id="cb8c557c-f548-4289-b0c4-88532f547992" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:15:58.962517 4678 audit.go:45] 2017-01-25T05:15:58.96250813-05:00 AUDIT: id="cb8c557c-f548-4289-b0c4-88532f547992" response="200" I0125 05:15:58.962839 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.003122ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:59.028295 4678 audit.go:125] 2017-01-25T05:15:59.028265895-05:00 AUDIT: id="1e136385-8921-4559-b5cb-6ede68448eb7" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:15:59.030424 4678 audit.go:45] 2017-01-25T05:15:59.030409363-05:00 AUDIT: id="1e136385-8921-4559-b5cb-6ede68448eb7" response="200" I0125 05:15:59.030675 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.632285ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:15:59.031482 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:15:59.068096 4678 audit.go:125] 2017-01-25T05:15:59.068054989-05:00 AUDIT: id="9141f6f4-af39-4ffb-bf82-7865d2357939" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:15:59.069440 4678 audit.go:45] 2017-01-25T05:15:59.069428936-05:00 AUDIT: id="9141f6f4-af39-4ffb-bf82-7865d2357939" response="200" I0125 05:15:59.069512 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.983175ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:15:59.201269 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:15:59.201286 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:15:59.265280 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:59.265313 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:59.265911 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:15:59.265926 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:15:59.272398 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc427a3da20 -1 [] true false map[] 0xc43282e690 } I0125 05:15:59.272446 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:59.272527 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc427a3db00 -1 [] true false map[] 0xc43282e870 } I0125 05:15:59.272553 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:15:59.306704 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:15:59.323974 4678 audit.go:125] 2017-01-25T05:15:59.323943693-05:00 AUDIT: id="73143aff-6f03-49f6-b054-cb889dee423b" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:15:59.324706 4678 audit.go:45] 2017-01-25T05:15:59.324695073-05:00 AUDIT: id="73143aff-6f03-49f6-b054-cb889dee423b" response="200" I0125 05:15:59.324777 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.589987ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:59.324987 4678 controller.go:106] Found 0 cronjobs I0125 05:15:59.326719 4678 audit.go:125] 2017-01-25T05:15:59.326699501-05:00 AUDIT: id="19893e4f-747f-4749-9406-b6b363c79a29" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:15:59.327469 4678 audit.go:45] 2017-01-25T05:15:59.327459529-05:00 AUDIT: id="19893e4f-747f-4749-9406-b6b363c79a29" response="200" I0125 05:15:59.327540 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.382923ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:15:59.327750 4678 controller.go:114] Found 0 jobs I0125 05:15:59.327758 4678 controller.go:117] Found 0 groups I0125 05:15:59.684604 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:00.068227 4678 audit.go:125] 2017-01-25T05:16:00.068167964-05:00 AUDIT: id="96e093ba-0b8e-4f73-a641-d14c300a5e0e" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:16:00.069553 4678 audit.go:45] 2017-01-25T05:16:00.069538673-05:00 AUDIT: id="96e093ba-0b8e-4f73-a641-d14c300a5e0e" response="200" I0125 05:16:00.069630 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.051958ms) 200 
[[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:16:01.068151 4678 audit.go:125] 2017-01-25T05:16:01.068105202-05:00 AUDIT: id="ad73cd44-43b8-4933-a351-24a84436b4b6" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:16:01.069432 4678 audit.go:45] 2017-01-25T05:16:01.069421306-05:00 AUDIT: id="ad73cd44-43b8-4933-a351-24a84436b4b6" response="200" I0125 05:16:01.069499 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.94198ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:16:01.564734 4678 kubelet.go:1138] Container garbage collection succeeded I0125 05:16:01.684612 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:01.971486 4678 audit.go:125] 2017-01-25T05:16:01.971454005-05:00 AUDIT: id="421f0d30-74c1-43db-b8a8-a666769b08bc" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:16:01.972567 4678 audit.go:45] 2017-01-25T05:16:01.972556611-05:00 AUDIT: id="421f0d30-74c1-43db-b8a8-a666769b08bc" response="200" I0125 05:16:01.972652 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.409873ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:02.068150 4678 audit.go:125] 2017-01-25T05:16:02.068104566-05:00 AUDIT: id="37baa1a3-2024-4bab-9b96-a9e2214138f5" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:16:02.069429 4678 audit.go:45] 2017-01-25T05:16:02.069417148-05:00 AUDIT: id="37baa1a3-2024-4bab-9b96-a9e2214138f5" response="200" I0125 05:16:02.069506 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.952952ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:16:02.485819 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.2, Port: 5432, Timeout: 1s I0125 05:16:02.486183 4678 prober.go:113] Liveness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:16:02.498878 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql-master centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 [] [run-postgresql-master] [{ 0 5432 TCP }] [{POSTGRESQL_MASTER_USER master } {POSTGRESQL_MASTER_PASSWORD qcoktIqkwDX8 } {POSTGRESQL_USER user } {POSTGRESQL_PASSWORD IbyV1wgYrrMd } {POSTGRESQL_DATABASE userdb } {POSTGRESQL_ADMIN_PASSWORD newpass }] {map[] map[]} [{postgresql-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc428f3ea80 0xc428f3eab0 /dev/termination-log IfNotPresent 0xc428f3eae0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U 
$POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:16:02.546869 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? \n----------\n 1\n(1 row)\n\n" I0125 05:16:02.546892 4678 prober.go:113] Readiness probe for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql-master" succeeded I0125 05:16:02.666226 4678 iptables.go:362] running iptables -N [KUBE-MARK-DROP -t nat] I0125 05:16:02.685102 4678 iptables.go:362] running iptables -C [KUBE-MARK-DROP -t nat -j MARK --set-xmark 0x00008000/0x00008000] I0125 05:16:02.699583 4678 iptables.go:362] running iptables -N [KUBE-FIREWALL -t filter] I0125 05:16:02.714656 4678 iptables.go:362] running iptables -C [KUBE-FIREWALL -t filter -m comment --comment kubernetes firewall for dropping marked packets -m mark --mark 0x00008000/0x00008000 -j DROP] I0125 05:16:02.729474 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -j KUBE-FIREWALL] I0125 05:16:02.741447 4678 iptables.go:362] running iptables -C [INPUT -t filter -j KUBE-FIREWALL] I0125 05:16:02.752382 4678 iptables.go:362] running iptables -N [KUBE-MARK-MASQ -t nat] I0125 05:16:02.763967 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:02.775037 4678 iptables.go:362] running iptables -C [KUBE-MARK-MASQ -t nat -j MARK --set-xmark 0x00004000/0x00004000] I0125 05:16:02.785997 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:02.796979 4678 iptables.go:362] running iptables -C [KUBE-POSTROUTING -t nat -m comment --comment kubernetes service traffic requiring SNAT -m mark --mark 0x00004000/0x00004000 -j MASQUERADE] I0125 05:16:02.904444 4678 prober.go:170] TCP-Probe PodIP: 172.17.0.8, Port: 5432, Timeout: 1s I0125 05:16:02.904758 4678 prober.go:113] Liveness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:16:02.905457 4678 prober.go:145] Exec-Probe Pod: &TypeMeta{Kind:,APIVersion:,}, Container: {postgresql centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 [] [] [{ 0 5432 TCP }] [{POSTGRESQL_USER 0xc426a5c200} {POSTGRESQL_PASSWORD 0xc426a5c240} {POSTGRESQL_DATABASE sampledb }] {map[memory:{{536870912 0} {} BinarySI}] map[memory:{{536870912 0} {} BinarySI}]} [{postgresql-helper-data false /var/lib/pgsql/data } {default-token-0g2nw true /var/run/secrets/kubernetes.io/serviceaccount }] 0xc435846f90 0xc435846fc0 /dev/termination-log IfNotPresent 0xc435846ff0 false false false}, Command: [/bin/sh -i -c psql -h 127.0.0.1 -U $POSTGRESQL_USER -q -d $POSTGRESQL_DATABASE -c 'SELECT 1'] I0125 05:16:02.954508 4678 exec.go:38] Exec probe response: "sh: cannot set terminal process group (-1): Inappropriate ioctl for device\nsh: no job control in this shell\n ?column? 
\n----------\n 1\n(1 row)\n\n" I0125 05:16:02.954548 4678 prober.go:113] Readiness probe for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094):postgresql" succeeded I0125 05:16:02.967173 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:16:02.967197 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:02.968007 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:16:02 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc427311160 0 [] true false map[] 0xc43601c0f0 } I0125 05:16:02.968070 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:16:02.968318 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:16:02.968335 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:02.968903 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:16:02 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc427311280 0 [] true false map[] 0xc43601c2d0 } I0125 05:16:02.968942 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:16:03.068515 4678 audit.go:125] 2017-01-25T05:16:03.068470234-05:00 AUDIT: id="01adb6f3-ec21-4df5-9b28-bcc6f1fe32a0" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:16:03.069933 4678 audit.go:45] 2017-01-25T05:16:03.069922139-05:00 AUDIT: id="01adb6f3-ec21-4df5-9b28-bcc6f1fe32a0" response="200" I0125 05:16:03.070008 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.41995ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:16:03.427782 4678 audit.go:125] 2017-01-25T05:16:03.42774964-05:00 AUDIT: id="d01c5e40-f70e-4807-a6f7-31f800ef5a5b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:16:03.428191 4678 audit.go:45] 2017-01-25T05:16:03.428182074-05:00 AUDIT: id="d01c5e40-f70e-4807-a6f7-31f800ef5a5b" response="200" I0125 05:16:03.428516 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (987.029µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:03.428798 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:16:03.573750 4678 reflector.go:273] pkg/controller/endpoint/endpoints_controller.go:160: forcing resync I0125 05:16:03.574279 4678 endpoints_controller.go:334] Finished syncing service "default/kubernetes" endpoints. 
(2.467µs) I0125 05:16:03.578972 4678 audit.go:125] 2017-01-25T05:16:03.578928613-05:00 AUDIT: id="b159575a-e08f-4e50-af02-27bda5b2b06f" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:16:03.579338 4678 audit.go:125] 2017-01-25T05:16:03.579303861-05:00 AUDIT: id="686e1145-fa0a-4948-8832-a3929e2155ab" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:16:03.579669 4678 audit.go:125] 2017-01-25T05:16:03.579635289-05:00 AUDIT: id="5527eaf8-136e-4d0d-9abd-f7038436de05" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:16:03.579985 4678 audit.go:125] 2017-01-25T05:16:03.579957504-05:00 AUDIT: id="110289a7-8d88-4a7f-a433-d362eecdede0" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/router" I0125 05:16:03.580065 4678 audit.go:45] 2017-01-25T05:16:03.580052435-05:00 AUDIT: id="b159575a-e08f-4e50-af02-27bda5b2b06f" response="200" I0125 05:16:03.580146 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (4.889921ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:03.580312 4678 audit.go:125] 2017-01-25T05:16:03.580286535-05:00 AUDIT: id="cace6b1b-0281-4c12-b66d-8bbfe235f8c9" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/docker-registry" I0125 05:16:03.580790 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. 
(6.909418ms) I0125 05:16:03.581476 4678 audit.go:45] 2017-01-25T05:16:03.581462556-05:00 AUDIT: id="110289a7-8d88-4a7f-a433-d362eecdede0" response="200" I0125 05:16:03.581527 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/router: (6.231831ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:03.581762 4678 audit.go:45] 2017-01-25T05:16:03.581749753-05:00 AUDIT: id="686e1145-fa0a-4948-8832-a3929e2155ab" response="200" I0125 05:16:03.581813 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (7.141341ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:03.581827 4678 audit.go:45] 2017-01-25T05:16:03.581816926-05:00 AUDIT: id="cace6b1b-0281-4c12-b66d-8bbfe235f8c9" response="200" I0125 05:16:03.581871 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/docker-registry: (6.163394ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:03.581760 4678 endpoints_controller.go:334] Finished syncing service "default/router" endpoints. (7.454471ms) I0125 05:16:03.581982 4678 audit.go:45] 2017-01-25T05:16:03.58197044-05:00 AUDIT: id="5527eaf8-136e-4d0d-9abd-f7038436de05" response="200" I0125 05:16:03.582032 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (7.269634ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:03.582166 4678 endpoints_controller.go:334] Finished syncing service "default/docker-registry" endpoints. (8.088572ms) I0125 05:16:03.582252 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave, ready: 0 not ready: 0 I0125 05:16:03.582281 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (8.359621ms) I0125 05:16:03.584126 4678 audit.go:125] 2017-01-25T05:16:03.584100185-05:00 AUDIT: id="93c37867-1c6e-4c24-815c-430ef8f3ccc7" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:16:03.585645 4678 audit.go:45] 2017-01-25T05:16:03.585632025-05:00 AUDIT: id="93c37867-1c6e-4c24-815c-430ef8f3ccc7" response="200" I0125 05:16:03.585695 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (3.053227ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:03.586085 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. 
(12.168471ms) I0125 05:16:03.586194 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:16:03.586331 4678 proxier.go:804] Syncing iptables rules I0125 05:16:03.586342 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:03.594633 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:03.594719 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:03.594735 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:03.594748 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:03.594779 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master] I0125 05:16:03.594822 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:03.594831 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:03.594839 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:16:03.594848 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper] I0125 05:16:03.600488 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:03.610862 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:03.621753 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:03.631889 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:03.641859 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:03.655778 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:03.672699 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:03.680846 4678 reflector.go:273] pkg/controller/podautoscaler/horizontal.go:133: forcing resync I0125 05:16:03.684683 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:03.693930 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:03.709971 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] 
:KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A 
KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:03.710003 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:03.723976 4678 proxier.go:797] syncProxyRules took 137.643643ms I0125 05:16:03.723999 4678 proxier.go:566] OnEndpointsUpdate took 137.736337ms for 6 endpoints I0125 05:16:03.724047 4678 proxier.go:381] Received update notice: [] I0125 05:16:03.724080 4678 proxier.go:804] Syncing iptables rules I0125 05:16:03.724090 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:03.736397 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:03.746066 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:03.757385 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:03.762493 4678 reflector.go:273] pkg/controller/disruption/disruption.go:284: forcing resync I0125 05:16:03.762608 4678 reflector.go:273] pkg/controller/disruption/disruption.go:283: forcing resync I0125 05:16:03.762771 4678 reflector.go:273] pkg/controller/disruption/disruption.go:281: forcing resync I0125 05:16:03.768008 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:03.779104 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:03.789365 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment 
--comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:03.799208 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:03.811815 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:03.817037 4678 reflector.go:273] pkg/controller/disruption/disruption.go:285: forcing resync I0125 05:16:03.824243 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SVC-T2TLQTY2NRIUTPUX -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -j KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -s 172.17.0.8/32 -j KUBE-MARK-MASQ -A KUBE-SEP-5EBQIEXSJBX7BRLN -m comment --comment extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql -m tcp -p tcp -j DNAT --to-destination 172.17.0.8:5432 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 
53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:03.824295 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:03.837738 4678 proxier.go:797] syncProxyRules took 113.652142ms I0125 05:16:03.837764 4678 proxier.go:431] OnServiceUpdate took 113.706807ms for 4 services I0125 05:16:03.858675 4678 reflector.go:273] pkg/controller/petset/pet_set.go:148: forcing resync I0125 05:16:03.989902 4678 reflector.go:273] pkg/controller/informers/factory.go:89: forcing resync I0125 05:16:04.068245 4678 audit.go:125] 
2017-01-25T05:16:04.068176497-05:00 AUDIT: id="8b862eac-a316-44db-9654-5784c63a13da" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:16:04.069703 4678 audit.go:45] 2017-01-25T05:16:04.069692102-05:00 AUDIT: id="8b862eac-a316-44db-9654-5784c63a13da" response="200" I0125 05:16:04.069790 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.227401ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:16:04.137514 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:16:04.137575 4678 pv_controller_base.go:607] storeObjectUpdate updating claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" with version 10942 I0125 05:16:04.137594 4678 pv_controller.go:192] synchronizing PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:16:04.137611 4678 pv_controller.go:339] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" found: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:16:04.137618 4678 pv_controller.go:356] synchronizing bound PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: claim is already correctly bound I0125 05:16:04.137626 4678 pv_controller.go:838] binding volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:16:04.137633 4678 pv_controller.go:703] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: binding to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:16:04.137700 4678 pv_controller.go:761] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: already bound to "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:16:04.137710 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:16:04.137514 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:16:04.137718 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:16:04.137734 4678 pv_controller.go:768] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: binding to "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:16:04.137783 4678 pv_controller.go:823] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim]: already bound to 
"pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:16:04.137789 4678 pv_controller.go:546] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: set phase Bound I0125 05:16:04.137817 4678 pv_controller.go:594] updating PersistentVolumeClaim[extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim] status: phase Bound already set I0125 05:16:04.137822 4678 pv_controller.go:864] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" bound to claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" I0125 05:16:04.137835 4678 pv_controller.go:865] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" status after binding: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:16:04.137853 4678 pv_controller.go:866] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" status after binding: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:16:04.137907 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:16:04.137947 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 10938 I0125 05:16:04.137962 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:16:04.137967 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:16:04.137976 4678 pv_controller.go:421] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim found: phase: Bound, bound to: "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000", bindCompleted: true, boundByController: true I0125 05:16:04.137988 4678 pv_controller.go:482] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: all is bound I0125 05:16:04.137992 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Bound I0125 05:16:04.137997 4678 pv_controller.go:646] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase Bound already set I0125 05:16:04.607802 4678 panics.go:76] GET /oapi/v1/watch/builds?resourceVersion=10636&timeoutSeconds=321: (5m21.007619987s) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783 system:serviceaccount:openshift-infra:build-controller] 172.18.7.222:50846] I0125 05:16:04.608104 4678 reflector.go:392] github.com/openshift/origin/pkg/build/controller/factory/factory.go:90: Watch close - *api.Build total 3 items received I0125 05:16:04.610237 4678 audit.go:125] 2017-01-25T05:16:04.61018471-05:00 AUDIT: id="55939c49-035b-4a61-9216-a4c51c2b38d1" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:build-controller" as="" asgroups="" 
namespace="" uri="/oapi/v1/watch/builds?resourceVersion=10734&timeoutSeconds=580" I0125 05:16:04.610697 4678 audit.go:45] 2017-01-25T05:16:04.610686073-05:00 AUDIT: id="55939c49-035b-4a61-9216-a4c51c2b38d1" response="200" I0125 05:16:05.068188 4678 audit.go:125] 2017-01-25T05:16:05.068138721-05:00 AUDIT: id="f7703429-7607-458c-b51e-116e3fc7412a" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:16:05.069599 4678 audit.go:45] 2017-01-25T05:16:05.06958827-05:00 AUDIT: id="f7703429-7607-458c-b51e-116e3fc7412a" response="200" I0125 05:16:05.069679 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.162635ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:16:05.374040 4678 audit.go:125] 2017-01-25T05:16:05.374009503-05:00 AUDIT: id="b5f3a1bc-b60a-4f3c-854c-a1971d56dc5c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:16:05.374964 4678 audit.go:45] 2017-01-25T05:16:05.374953686-05:00 AUDIT: id="b5f3a1bc-b60a-4f3c-854c-a1971d56dc5c" response="200" I0125 05:16:05.375042 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.560946ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:05.684639 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:06.068221 4678 audit.go:125] 2017-01-25T05:16:06.068180487-05:00 AUDIT: id="53778a96-8060-4dad-a7a8-056e7a6e56ee" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:16:06.069639 4678 audit.go:45] 2017-01-25T05:16:06.069627085-05:00 AUDIT: id="53778a96-8060-4dad-a7a8-056e7a6e56ee" response="200" I0125 05:16:06.069724 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (3.171539ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:16:06.071672 4678 audit.go:125] 2017-01-25T05:16:06.071619197-05:00 AUDIT: id="8a2d124a-8bfb-442c-9ee1-b4a1c6155b43" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1" I0125 05:16:06.072694 4678 audit.go:45] 2017-01-25T05:16:06.072678445-05:00 AUDIT: id="8a2d124a-8bfb-442c-9ee1-b4a1c6155b43" response="200" I0125 05:16:06.072747 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=deployment%3Dpostgresql-slave-1: (2.386597ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50942] I0125 05:16:06.286767 4678 audit.go:125] 2017-01-25T05:16:06.286722762-05:00 AUDIT: id="217e5dfb-43d2-486e-9c12-b315422858fa" ip="172.18.7.222" 
method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api" I0125 05:16:06.286951 4678 audit.go:45] 2017-01-25T05:16:06.286933959-05:00 AUDIT: id="217e5dfb-43d2-486e-9c12-b315422858fa" response="200" I0125 05:16:06.287054 4678 panics.go:76] GET /api: (525.354µs) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41072] I0125 05:16:06.287809 4678 audit.go:125] 2017-01-25T05:16:06.287790029-05:00 AUDIT: id="66958ec1-c01c-46dd-9e1c-2eab1fb2ff99" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis" I0125 05:16:06.287937 4678 audit.go:45] 2017-01-25T05:16:06.287928562-05:00 AUDIT: id="66958ec1-c01c-46dd-9e1c-2eab1fb2ff99" response="200" I0125 05:16:06.288161 4678 panics.go:76] GET /apis: (542.883µs) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41072] I0125 05:16:06.292989 4678 audit.go:125] 2017-01-25T05:16:06.292963174-05:00 AUDIT: id="aa5825d8-d507-49a7-a7e9-8451dd74d43b" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams" I0125 05:16:06.295723 4678 audit.go:45] 2017-01-25T05:16:06.295713591-05:00 AUDIT: id="aa5825d8-d507-49a7-a7e9-8451dd74d43b" response="200" I0125 05:16:06.297716 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams: (4.931752ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41072] I0125 05:16:06.507780 4678 audit.go:125] 2017-01-25T05:16:06.507744179-05:00 AUDIT: id="f6bcc596-38d7-47f2-876e-af6baf52b713" ip="172.18.7.222" method="GET" user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/imagestreams" I0125 05:16:06.508688 4678 audit.go:45] 2017-01-25T05:16:06.508677969-05:00 AUDIT: id="f6bcc596-38d7-47f2-876e-af6baf52b713" response="200" I0125 05:16:06.508753 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/imagestreams: (2.628912ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41074] I0125 05:16:06.795220 4678 audit.go:125] 2017-01-25T05:16:06.795174915-05:00 AUDIT: id="c63ffca1-15a4-4f62-bf53-53a4f1d1dfb2" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/buildconfigs" I0125 05:16:06.796405 4678 audit.go:45] 2017-01-25T05:16:06.796393865-05:00 AUDIT: id="c63ffca1-15a4-4f62-bf53-53a4f1d1dfb2" response="200" I0125 05:16:06.796473 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/buildconfigs: (1.500072ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:06.797313 4678 audit.go:125] 2017-01-25T05:16:06.797270361-05:00 AUDIT: id="d3a89d17-91bc-4ecf-b837-7e4520c1b221" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/builds" I0125 05:16:06.798250 4678 audit.go:45] 2017-01-25T05:16:06.79823968-05:00 AUDIT: id="d3a89d17-91bc-4ecf-b837-7e4520c1b221" response="200" I0125 05:16:06.798312 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/builds: (1.215671ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) 
kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:06.798983 4678 audit.go:125] 2017-01-25T05:16:06.798958558-05:00 AUDIT: id="28b31277-6079-40ea-aa7b-96748528522d" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/imagestreams" I0125 05:16:06.799787 4678 audit.go:45] 2017-01-25T05:16:06.79977711-05:00 AUDIT: id="28b31277-6079-40ea-aa7b-96748528522d" response="200" I0125 05:16:06.799832 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/imagestreams: (1.046949ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:06.800467 4678 audit.go:125] 2017-01-25T05:16:06.800445281-05:00 AUDIT: id="d2dc95b7-0a68-4c53-a113-aa7b247bfb3a" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs" I0125 05:16:06.801702 4678 audit.go:45] 2017-01-25T05:16:06.801692027-05:00 AUDIT: id="d2dc95b7-0a68-4c53-a113-aa7b247bfb3a" response="200" I0125 05:16:06.802279 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs: (2.003445ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:06.805141 4678 audit.go:125] 2017-01-25T05:16:06.805115586-05:00 AUDIT: id="fca43dc6-a2cf-41d4-96cf-262bcd7cc2a4" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper" I0125 05:16:06.805974 4678 audit.go:45] 2017-01-25T05:16:06.805965109-05:00 AUDIT: id="fca43dc6-a2cf-41d4-96cf-262bcd7cc2a4" response="200" I0125 05:16:06.806149 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper: (1.18518ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:06.814674 4678 audit.go:125] 2017-01-25T05:16:06.814646158-05:00 AUDIT: id="73b5e1e9-79d2-473e-a168-eb9de7eb79ab" ip="172.18.7.222" method="PUT" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper" I0125 05:16:06.816921 4678 audit.go:45] 2017-01-25T05:16:06.816906263-05:00 AUDIT: id="73b5e1e9-79d2-473e-a168-eb9de7eb79ab" response="200" I0125 05:16:06.817147 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper: (2.662783ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:06.817579 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:16:06.818423 4678 audit.go:125] 2017-01-25T05:16:06.818389921-05:00 AUDIT: id="aa1b80f8-212d-4c6b-845e-5188407af73a" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:16:06.820417 4678 audit.go:45] 2017-01-25T05:16:06.820403293-05:00 AUDIT: 
id="aa1b80f8-212d-4c6b-845e-5188407af73a" response="200" I0125 05:16:06.820687 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (2.481932ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:06.821727 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d ready condition last transition time 2017-01-25 05:12:25 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:16:06.82171446 -0500 EST. I0125 05:16:06.822180 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. I0125 05:16:06.822591 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. Desired pod count change: 1->0 I0125 05:16:06.822678 4678 replication_controller.go:585] Too many "extended-test-postgresql-replication-1-34bbd-xd4g8"/"postgresql-helper-1" replicas, need 0, deleting 1 I0125 05:16:06.822696 4678 controller_utils.go:306] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 waiting on deletions for: [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d] I0125 05:16:06.822711 4678 controller_utils.go:175] Setting expectations &controller.ControlleeExpectations{add:0, del:1, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1", timestamp:time.Time{sec:63620936166, nsec:822708567, loc:(*time.Location)(0xa2479e0)}} I0125 05:16:06.822764 4678 controller_utils.go:523] Controller postgresql-helper-1 deleting pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d I0125 05:16:06.823126 4678 audit.go:125] 2017-01-25T05:16:06.823091272-05:00 AUDIT: id="f839d2a4-8ad7-4744-abc3-15ec6930246b" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:16:06.824345 4678 audit.go:125] 2017-01-25T05:16:06.824312137-05:00 AUDIT: id="ae8277f5-5679-45a8-8e88-107d21e176b2" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:16:06.825776 4678 audit.go:45] 2017-01-25T05:16:06.825762029-05:00 AUDIT: id="f839d2a4-8ad7-4744-abc3-15ec6930246b" response="200" I0125 05:16:06.825779 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:16:06.825898 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (3.031011ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:06.826456 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 3) I0125 05:16:06.826550 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d ready condition last transition time 2017-01-25 05:12:25 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:16:06.826539337 -0500 EST. 
I0125 05:16:06.826880 4678 audit.go:125] 2017-01-25T05:16:06.826846299-05:00 AUDIT: id="67c2165a-4554-4bf7-bdd9-feeb96f0e0f9" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:16:06.827036 4678 audit.go:45] 2017-01-25T05:16:06.827023482-05:00 AUDIT: id="ae8277f5-5679-45a8-8e88-107d21e176b2" response="201" I0125 05:16:06.827086 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.035938ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:06.828914 4678 audit.go:45] 2017-01-25T05:16:06.828898411-05:00 AUDIT: id="67c2165a-4554-4bf7-bdd9-feeb96f0e0f9" response="200" I0125 05:16:06.829000 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (5.844887ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:06.829309 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1, replicas 1->1 (need 0), fullyLabeledReplicas 1->1, readyReplicas 1->1, availableReplicas 1->1, sequence No: 2->3 I0125 05:16:06.829647 4678 config.go:281] Setting pods for source api I0125 05:16:06.829804 4678 event.go:217] Event(api.ObjectReference{Kind:"ReplicationController", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-helper-1", UID:"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11211", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: postgresql-helper-1-cpv6d I0125 05:16:06.830856 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:06.830919 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:06.831253 4678 docker_manager.go:1536] Killing container "1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208 postgresql extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d" with 30 second grace period I0125 05:16:06.831495 4678 replication_controller.go:378] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11059 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper] Annotations:map[openshift.io/deployment-config.name:postgresql-helper 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11214 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp:2017-01-25 05:16:36.827925649 -0500 EST DeletionGracePeriodSeconds:0xc42bcb5958 Labels:map[deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral] Annotations:map[openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:06.831620 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:16:36.827925649 -0500 EST, labels map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper]. 
I0125 05:16:06.831678 4678 controller_utils.go:320] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 received delete for pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d I0125 05:16:06.831687 4678 controller_utils.go:192] Lowered expectations &controller.ControlleeExpectations{add:0, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1", timestamp:time.Time{sec:63620936166, nsec:822708567, loc:(*time.Location)(0xa2479e0)}} I0125 05:16:06.831784 4678 audit.go:125] 2017-01-25T05:16:06.831750893-05:00 AUDIT: id="629d93e6-f4ee-40b8-9fd1-2b304c77a053" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:16:06.831720 4678 replica_set.go:320] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11059 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11214 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp:2017-01-25 05:16:36.827925649 -0500 EST DeletionGracePeriodSeconds:0xc42bcb5958 Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] Annotations:map[openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
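Annotation: the "Setting expectations ... del:1" / "Lowered expectations ... del:0" pair bracketing this delete is the controller's guard against acting twice on the same pod: it records how many creations and deletions it has requested and skips further sync work for the RC until watch events bring both counters back to zero. A reduced sketch of that bookkeeping, assuming plain int64 counters in place of the vendored ControlleeExpectations type:

```go
package main

import (
	"fmt"
	"sync/atomic"
)

// expectations is a cut-down ControlleeExpectations: counts of adds and
// deletes the controller has asked for but not yet observed via watch.
type expectations struct {
	add, del int64
}

// expectDeletions is the "Setting expectations ... del:1" step.
func (e *expectations) expectDeletions(n int64) { atomic.StoreInt64(&e.del, n) }

// deletionObserved is the "Lowered expectations ... del:0" step, run when
// updatePod/deletePod sees the pod go away.
func (e *expectations) deletionObserved() { atomic.AddInt64(&e.del, -1) }

// fulfilled gates the next sync: the controller stays quiet while it is
// still waiting on outstanding creates or deletes.
func (e *expectations) fulfilled() bool {
	return atomic.LoadInt64(&e.add) <= 0 && atomic.LoadInt64(&e.del) <= 0
}

func main() {
	var e expectations
	e.expectDeletions(1)
	fmt.Println("fulfilled before watch event:", e.fulfilled()) // false
	e.deletionObserved()                                        // pod delete observed
	fmt.Println("fulfilled after watch event:", e.fulfilled())  // true
}
```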
I0125 05:16:06.831822 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:16:36.827925649 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-helper-1-cpv6d", GenerateName:"postgresql-helper-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d", UID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11214", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:868420924, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42b63f840), DeletionGracePeriodSeconds:(*int64)(0xc42bcb5958), Labels:map[string]string{"name":"postgresql-helper", "app":"postgresql-ephemeral", "deployment":"postgresql-helper-1", "deploymentconfig":"postgresql-helper"}, Annotations:map[string]string{"openshift.io/deployment-config.name":"postgresql-helper", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-helper-1\",\"uid\":\"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11028\"}}\n", "openshift.io/deployment.name":"postgresql-helper-1", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/deployment-config.latest-version":"1", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-helper-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc42bcb5a50), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc4272c6f60), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), 
RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql", Image:"centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_USER", Value:"", ValueFrom:(*api.EnvVarSource)(0xc42b63f9e0)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"", ValueFrom:(*api.EnvVarSource)(0xc42b63fa60)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"sampledb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList{"memory":resource.Quantity{i:resource.int64Amount{value:536870912, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"", Format:"BinarySI"}}, Requests:api.ResourceList{"memory":resource.Quantity{i:resource.int64Amount{value:536870912, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"", Format:"BinarySI"}}}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-helper-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc4272c7050), ReadinessProbe:(*api.Probe)(0xc4272c7080), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc4272c70b0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc42bb3e1a0), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc42822fa80), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935945, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", 
HostIP:"172.18.7.222", PodIP:"172.17.0.8", StartTime:(*unversioned.Time)(0xc42b63fc20), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(0xc42b63fc40), Terminated:(*api.ContainerStateTerminated)(nil)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:true, RestartCount:0, Image:"centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", ImageID:"docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", ContainerID:"docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208"}}}}. I0125 05:16:06.832132 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-cpv6d, ReplicaSet controller will avoid syncing I0125 05:16:06.832218 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-cpv6d, job controller will avoid syncing I0125 05:16:06.832239 4678 daemoncontroller.go:332] Pod postgresql-helper-1-cpv6d updated. I0125 05:16:06.832300 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-cpv6d, daemon set controller will avoid syncing I0125 05:16:06.832324 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-cpv6d" I0125 05:16:06.832371 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-cpv6d, PodDisruptionBudget controller will avoid syncing. I0125 05:16:06.832378 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-cpv6d" I0125 05:16:06.832666 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-cpv6d, StatefulSet controller will avoid syncing I0125 05:16:06.834570 4678 audit.go:45] 2017-01-25T05:16:06.834556232-05:00 AUDIT: id="629d93e6-f4ee-40b8-9fd1-2b304c77a053" response="200" I0125 05:16:06.834657 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (3.146176ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:06.834704 4678 audit.go:125] 2017-01-25T05:16:06.834673037-05:00 AUDIT: id="f817cc3c-566a-4524-bb79-26c978fc3b1b" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:16:06.835810 4678 audit.go:125] 2017-01-25T05:16:06.835766509-05:00 AUDIT: id="234a1a32-1717-4a4f-976f-d8e5ff47181e" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status" I0125 05:16:06.836806 4678 audit.go:125] 2017-01-25T05:16:06.836771555-05:00 AUDIT: id="d9adb615-2595-4534-96db-3565ec80e52d" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 
05:16:06.838394 4678 audit.go:45] 2017-01-25T05:16:06.838380567-05:00 AUDIT: id="f817cc3c-566a-4524-bb79-26c978fc3b1b" response="200" I0125 05:16:06.838511 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (4.066138ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:06.838959 4678 audit.go:125] 2017-01-25T05:16:06.838926409-05:00 AUDIT: id="5d7b222b-b6a3-4cc3-a5e7-a65f6417e9cb" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/status" I0125 05:16:06.839712 4678 audit.go:45] 2017-01-25T05:16:06.839697977-05:00 AUDIT: id="5d7b222b-b6a3-4cc3-a5e7-a65f6417e9cb" response="200" I0125 05:16:06.839796 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/status: (1.098826ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:06.840341 4678 audit.go:45] 2017-01-25T05:16:06.840327411-05:00 AUDIT: id="234a1a32-1717-4a4f-976f-d8e5ff47181e" response="200" I0125 05:16:06.840357 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 3) I0125 05:16:06.840699 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:16:06.841377 4678 audit.go:125] 2017-01-25T05:16:06.841341716-05:00 AUDIT: id="3660c353-da4d-4b29-853a-507aaf747ea1" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:16:06.841614 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status: (11.486225ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:06.841867 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. 
Desired pod count change: 0->0 I0125 05:16:06.841852 4678 status_manager.go:425] Status for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935945 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.8 StartTime:0xc426a5ce80 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql State:{Waiting: Running:0xc42b14c720 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ImageID:docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ContainerID:docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208}]} version:4 podName:postgresql-helper-1-cpv6d podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:16:06.841909 4678 status_manager.go:435] Pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" is terminated, but some containers are still running I0125 05:16:06.842050 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (19.403852ms) I0125 05:16:06.842103 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d in state Running, deletion time 2017-01-25 05:16:36.827925649 -0500 EST I0125 05:16:06.842132 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1, replicas 1->0 (need 0), fullyLabeledReplicas 1->0, readyReplicas 1->0, availableReplicas 1->0, sequence No: 3->3 I0125 05:16:06.842487 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. 
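[editor's note] The entries above show the RC status dropping replicas 1->0 while the container is still running, because a pod with a deletion timestamp is ignored as inactive. A simplified sketch of that counting rule, with assumed helper names:

package main

import (
	"fmt"
	"time"
)

type Pod struct {
	Name              string
	Phase             string
	Ready             bool
	DeletionTimestamp *time.Time
}

func isActive(p *Pod) bool {
	return p.Phase != "Succeeded" && p.Phase != "Failed" && p.DeletionTimestamp == nil
}

func countReplicas(pods []*Pod) (replicas, ready int) {
	for _, p := range pods {
		if !isActive(p) {
			// Matches the "Ignoring inactive pod ... in state Running" entry:
			// a terminating pod no longer counts toward any replica figure.
			fmt.Printf("Ignoring inactive pod %s in state %s\n", p.Name, p.Phase)
			continue
		}
		replicas++
		if p.Ready {
			ready++
		}
	}
	return
}

func main() {
	del := time.Now().Add(30 * time.Second)
	pods := []*Pod{{Name: "postgresql-helper-1-cpv6d", Phase: "Running", Ready: true, DeletionTimestamp: &del}}
	r, rdy := countReplicas(pods)
	fmt.Printf("replicas 1->%d, readyReplicas 1->%d\n", r, rdy)
}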
I0125 05:16:06.843901 4678 audit.go:45] 2017-01-25T05:16:06.843887324-05:00 AUDIT: id="d9adb615-2595-4534-96db-3565ec80e52d" response="201" I0125 05:16:06.843955 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (13.341968ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:06.844222 4678 audit.go:45] 2017-01-25T05:16:06.84419311-05:00 AUDIT: id="3660c353-da4d-4b29-853a-507aaf747ea1" response="200" I0125 05:16:06.844287 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (10.779227ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:06.844551 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 0 not ready: 0 I0125 05:16:06.844920 4678 audit.go:125] 2017-01-25T05:16:06.844883965-05:00 AUDIT: id="a49a024d-ffbc-4b85-9aa3-e6ceb3690212" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status" I0125 05:16:06.846683 4678 audit.go:125] 2017-01-25T05:16:06.846651662-05:00 AUDIT: id="b2c24b6d-8f3c-40c6-9bf0-60512b94d93c" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:16:06.848454 4678 audit.go:45] 2017-01-25T05:16:06.848440876-05:00 AUDIT: id="b2c24b6d-8f3c-40c6-9bf0-60512b94d93c" response="200" I0125 05:16:06.848511 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (3.612068ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:06.848934 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:16:06.849003 4678 roundrobin.go:275] LoadBalancerRR: Removing endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql I0125 05:16:06.849083 4678 proxier.go:631] Removing endpoints for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql" I0125 05:16:06.849106 4678 proxier.go:804] Syncing iptables rules I0125 05:16:06.849115 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:06.863111 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:06.863315 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 1 Endpoints [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master] I0125 05:16:06.863384 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:06.863405 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:06.863426 4678 
healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:06.863451 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper - 0 Endpoints [] I0125 05:16:06.863473 4678 healthcheck.go:89] Deleting endpoints map for service extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, all local endpoints gone I0125 05:16:06.863511 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:06.863534 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:06.863557 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:16:06.864641 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (32.193924ms) I0125 05:16:06.867935 4678 audit.go:45] 2017-01-25T05:16:06.867914027-05:00 AUDIT: id="a49a024d-ffbc-4b85-9aa3-e6ceb3690212" response="200" I0125 05:16:06.869391 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1/status: (26.533261ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:06.869986 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. Desired pod count change: 0->0 I0125 05:16:06.870013 4678 replication_controller.go:338] Observed updated replica count for rc: postgresql-helper-1, 1->0 I0125 05:16:06.870293 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (28.212245ms) I0125 05:16:06.870426 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d in state Running, deletion time 2017-01-25 05:16:36.827925649 -0500 EST I0125 05:16:06.870456 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (99.808µs) I0125 05:16:06.871380 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. 
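[editor's note] The proxier entries that follow render an entire table ("*filter ... COMMIT", "*nat ... COMMIT") and apply it with a single "iptables-restore [--noflush --counters]" call rather than one iptables command per rule. A small sketch of that batching pattern; the rule text is a trimmed example, not the full rule set from the log:

package main

import (
	"bytes"
	"fmt"
	"os/exec"
)

func restoreRules(rules []byte) error {
	// --noflush keeps chains not mentioned in the input;
	// --counters preserves packet/byte counters across the reload.
	cmd := exec.Command("iptables-restore", "--noflush", "--counters")
	cmd.Stdin = bytes.NewReader(rules)
	out, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("iptables-restore failed: %v: %s", err, out)
	}
	return nil
}

func main() {
	rules := []byte("*filter\n" +
		":KUBE-SERVICES - [0:0]\n" +
		"-A KUBE-SERVICES -m comment --comment \"postgresql-helper has no endpoints\" " +
		"-p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT\n" +
		"COMMIT\n")
	if err := restoreRules(rules); err != nil {
		fmt.Println(err) // requires root and iptables on PATH; expected to fail otherwise
	}
}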
I0125 05:16:06.874037 4678 audit.go:125] 2017-01-25T05:16:06.873908694-05:00 AUDIT: id="b26d855c-d7ea-465a-9be9-5a44e99b0662" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status" I0125 05:16:06.893504 4678 audit.go:45] 2017-01-25T05:16:06.893479874-05:00 AUDIT: id="b26d855c-d7ea-465a-9be9-5a44e99b0662" response="200" I0125 05:16:06.893653 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper/status: (20.347307ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:06.894111 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" (observed generation: 3) I0125 05:16:06.894350 4678 factory.go:122] Updating deployment config "postgresql-helper" I0125 05:16:06.896715 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:06.925548 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:06.959890 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:06.986768 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:06.998991 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:07.021327 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:07.046447 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:07.059407 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:07.072153 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SEP-5EBQIEXSJBX7BRLN - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP 
-A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p 
udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -X KUBE-SEP-5EBQIEXSJBX7BRLN -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:07.072184 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:07.080444 4678 docker_manager.go:1577] Container "1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208 postgresql extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d" exited after 249.167327ms I0125 05:16:07.081407 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-helper-1-cpv6d", UID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11031", FieldPath:"spec.containers{postgresql}"}): type: 'Normal' reason: 'Killing' Killing container with docker id 1ebc67751226: Need to kill pod. I0125 05:16:07.082211 4678 audit.go:125] 2017-01-25T05:16:07.082159105-05:00 AUDIT: id="b4adc06e-4bef-41bd-911e-ed76e2c8e6b5" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:16:07.085542 4678 audit.go:45] 2017-01-25T05:16:07.085527004-05:00 AUDIT: id="b4adc06e-4bef-41bd-911e-ed76e2c8e6b5" response="201" I0125 05:16:07.085634 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (3.78439ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:07.088622 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:16:07.090619 4678 docker_manager.go:1536] Killing container "969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d" with 30 second grace period I0125 05:16:07.093374 4678 proxier.go:797] syncProxyRules took 244.266577ms I0125 05:16:07.093398 4678 proxier.go:566] OnEndpointsUpdate took 244.379293ms for 6 endpoints I0125 05:16:07.093434 4678 proxier.go:381] Received update notice: [] I0125 05:16:07.093472 4678 proxier.go:804] Syncing iptables rules I0125 05:16:07.093483 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:07.119658 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:07.132775 4678 generic.go:145] GenericPLEG: b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208: running -> exited I0125 05:16:07.139540 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42a47e840 Mounts:[{Name: 
Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~empty-dir/postgresql-helper-data Destination:/var/lib/pgsql/data Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/containers/postgresql/422ec933 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42cdbafc0 NetworkSettings:0xc42d061c00} I0125 05:16:07.143339 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:07.168389 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:07.180654 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:07.197140 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:07.213317 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:07.231947 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:07.258371 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:07.281776 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT 
--to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment 
default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:07.281809 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:07.307598 4678 proxier.go:797] syncProxyRules took 214.120318ms I0125 05:16:07.307637 4678 proxier.go:431] OnServiceUpdate took 214.189683ms for 4 services I0125 05:16:07.307659 4678 proxier.go:804] Syncing iptables rules I0125 05:16:07.307670 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:07.327105 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:07.342554 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:07.354556 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc42a47f8c0 Mounts:[] Config:0xc42cdbbb00 NetworkSettings:0xc43738c500} I0125 05:16:07.354825 4678 docker_manager.go:1577] Container "969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d" exited after 264.170026ms I0125 05:16:07.354900 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:07.361423 4678 generic.go:342] PLEG: Write status for postgresql-helper-1-cpv6d/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-helper-1-cpv6d", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc428bf3260), (*container.ContainerStatus)(0xc426ffa540)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:16:07.361561 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208"} I0125 05:16:07.367581 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:07.382666 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:07.400825 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:07.418933 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:07.437733 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:07.455739 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] 
:KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name 
KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:07.455773 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:07.491966 4678 proxier.go:797] syncProxyRules took 184.304192ms I0125 05:16:07.492012 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-CONTAINER -t nat] I0125 05:16:07.509797 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-CONTAINER] I0125 05:16:07.527582 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-HOST -t nat] I0125 05:16:07.541090 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-HOST] I0125 05:16:07.551572 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-CONTAINER -t nat] I0125 05:16:07.561195 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-CONTAINER] I0125 05:16:07.571000 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-HOST -t nat] I0125 05:16:07.582067 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-HOST] I0125 05:16:07.597442 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-NON-LOCAL -t filter] I0125 05:16:07.607806 4678 iptables.go:362] running iptables -C [INPUT -t filter -m comment --comment Ensure that non-local NodePort traffic can 
flow -j KUBE-NODEPORT-NON-LOCAL] I0125 05:16:07.655441 4678 panics.go:76] GET /apis/batch/v1/watch/jobs?resourceVersion=4&timeoutSeconds=409: (6m49.000829556s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:07.655745 4678 reflector.go:392] pkg/controller/informers/factory.go:89: Watch close - *batch.Job total 0 items received I0125 05:16:07.656446 4678 audit.go:125] 2017-01-25T05:16:07.656416636-05:00 AUDIT: id="c929ec63-609f-4439-b6fb-e4a536e3cb55" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/batch/v1/watch/jobs?resourceVersion=4&timeoutSeconds=441" I0125 05:16:07.656858 4678 audit.go:45] 2017-01-25T05:16:07.656848596-05:00 AUDIT: id="c929ec63-609f-4439-b6fb-e4a536e3cb55" response="200" I0125 05:16:07.684640 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094) I0125 05:16:07.684692 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:07.684870 4678 kubelet_pods.go:1029] Generating status for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:07.685062 4678 status_manager.go:312] Ignoring same status for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:22 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:42 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:40:22 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.3 StartTime:2017-01-25 03:40:22 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:registry State:{Waiting: Running:0xc433cd03a0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-docker-registry:86a9783 ImageID:docker://sha256:3ec55bd72e2d99d049485e7f0556140392c415053ffba63b99bdeca83d4e5b7f ContainerID:docker://b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b}]} I0125 05:16:07.685178 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:07.693425 4678 secret.go:179] Setting up volume registry-token-vjbst for pod e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/registry-token-vjbst I0125 05:16:07.694393 4678 audit.go:125] 2017-01-25T05:16:07.694365229-05:00 AUDIT: id="9b5ac430-ff5a-4193-ab52-1b494295e1a7" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/registry-token-vjbst" I0125 05:16:07.695635 4678 audit.go:45] 2017-01-25T05:16:07.695625241-05:00 AUDIT: id="9b5ac430-ff5a-4193-ab52-1b494295e1a7" response="200" I0125 05:16:07.695861 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/registry-token-vjbst: (1.712585ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:07.695994 4678 secret.go:206] Received secret default/registry-token-vjbst containing (4) pieces of data, 4113 total bytes I0125 05:16:07.696300 4678 atomic_writer.go:142] pod default/docker-registry-1-xppm3 volume 
registry-token-vjbst: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/registry-token-vjbst I0125 05:16:07.696413 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094-registry-token-vjbst" (spec.Name: "registry-token-vjbst") pod "e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094" (UID: "e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094"). I0125 05:16:07.818679 4678 audit.go:125] 2017-01-25T05:16:07.818637906-05:00 AUDIT: id="ca572c70-c1bb-44b3-b13a-b7f2128fd390" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper" I0125 05:16:07.820009 4678 audit.go:45] 2017-01-25T05:16:07.819998718-05:00 AUDIT: id="ca572c70-c1bb-44b3-b13a-b7f2128fd390" response="200" I0125 05:16:07.820280 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper: (1.842429ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:07.821446 4678 audit.go:125] 2017-01-25T05:16:07.821414999-05:00 AUDIT: id="54885d39-93ed-48a8-ac9d-b91594ab6d42" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-helper" I0125 05:16:07.822967 4678 audit.go:45] 2017-01-25T05:16:07.82295676-05:00 AUDIT: id="54885d39-93ed-48a8-ac9d-b91594ab6d42" response="200" I0125 05:16:07.823311 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-helper: (2.048891ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:07.824236 4678 audit.go:125] 2017-01-25T05:16:07.824188643-05:00 AUDIT: id="106008cc-afbf-4e3d-ab01-06cd11864414" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:16:07.825066 4678 audit.go:45] 2017-01-25T05:16:07.825055753-05:00 AUDIT: id="106008cc-afbf-4e3d-ab01-06cd11864414" response="200" I0125 05:16:07.825345 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (1.301923ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:07.826132 4678 audit.go:125] 2017-01-25T05:16:07.826097273-05:00 AUDIT: id="ffeede0c-5417-4e2b-8f07-6ec7d3c6e366" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:16:07.827326 4678 audit.go:45] 2017-01-25T05:16:07.82731348-05:00 AUDIT: id="ffeede0c-5417-4e2b-8f07-6ec7d3c6e366" response="200" I0125 05:16:07.827939 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (2.00864ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) 
kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:07.829462 4678 audit.go:125] 2017-01-25T05:16:07.829430495-05:00 AUDIT: id="f78f7b2c-150b-447f-b9d9-2a8982b0409f" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:16:07.830298 4678 audit.go:45] 2017-01-25T05:16:07.830288177-05:00 AUDIT: id="f78f7b2c-150b-447f-b9d9-2a8982b0409f" response="200" I0125 05:16:07.830579 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (1.321846ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:07.831807 4678 audit.go:125] 2017-01-25T05:16:07.831783145-05:00 AUDIT: id="4d1a24ec-3194-43db-ad32-c5dd2cb1ca4a" ip="172.18.7.222" method="PUT" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:16:07.834506 4678 audit.go:45] 2017-01-25T05:16:07.83449239-05:00 AUDIT: id="4d1a24ec-3194-43db-ad32-c5dd2cb1ca4a" response="200" I0125 05:16:07.835007 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (3.373198ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:07.835521 4678 factory.go:154] Replication controller "postgresql-helper-1" updated. I0125 05:16:07.836117 4678 replication_controller.go:322] Observed updated replication controller postgresql-helper-1. 
Desired pod count change: 0->0 I0125 05:16:07.836212 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d in state Running, deletion time 2017-01-25 05:16:36.827925649 -0500 EST I0125 05:16:07.836243 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (76.165µs) I0125 05:16:07.836419 4678 audit.go:125] 2017-01-25T05:16:07.836394461-05:00 AUDIT: id="448e5278-b8f3-405c-b566-c4f6fce52a21" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:16:07.837253 4678 audit.go:45] 2017-01-25T05:16:07.837239126-05:00 AUDIT: id="448e5278-b8f3-405c-b566-c4f6fce52a21" response="200" I0125 05:16:07.837543 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (1.357781ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:07.838539 4678 audit.go:125] 2017-01-25T05:16:07.838514185-05:00 AUDIT: id="b48e0364-31e7-4f56-b518-d5fc12e245ed" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1" I0125 05:16:07.841136 4678 audit.go:45] 2017-01-25T05:16:07.841121641-05:00 AUDIT: id="b48e0364-31e7-4f56-b518-d5fc12e245ed" response="200" I0125 05:16:07.841193 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-helper-1: (2.848121ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:07.842382 4678 replication_controller.go:660] Replication Controller has been deleted extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1 I0125 05:16:07.842402 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" (28.884µs) I0125 05:16:07.842694 4678 audit.go:125] 2017-01-25T05:16:07.842662156-05:00 AUDIT: id="1a3f2c25-5fcd-4d95-9fde-0ccd497fbdb6" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper" I0125 05:16:07.842829 4678 factory.go:181] Replication controller "postgresql-helper-1" deleted. 
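
The "Ignoring inactive pod ... in state Running, deletion time ..." lines above show how the replication controller counts replicas: a pod whose deletion timestamp is set is excluded from the active count even while its phase is still Running. The following is a minimal, self-contained Go sketch of that filtering rule; the pod type and helper names here are illustrative stand-ins, not the real Kubernetes API types.

package main

import (
	"fmt"
	"time"
)

// pod is a stripped-down stand-in for the Kubernetes Pod type,
// carrying only the fields the filter below looks at.
type pod struct {
	Name              string
	Phase             string     // "Pending", "Running", "Succeeded", "Failed"
	DeletionTimestamp *time.Time // non-nil once a delete has been requested
}

// isActive mirrors the rule visible in the log: a pod counts toward the
// replica total only if it has not finished and is not being deleted.
func isActive(p pod) bool {
	return p.Phase != "Succeeded" && p.Phase != "Failed" && p.DeletionTimestamp == nil
}

func filterActive(pods []pod) []pod {
	var active []pod
	for _, p := range pods {
		if isActive(p) {
			active = append(active, p)
		} else {
			fmt.Printf("ignoring inactive pod %s in state %s\n", p.Name, p.Phase)
		}
	}
	return active
}

func main() {
	deleteAt := time.Now().Add(30 * time.Second)
	pods := []pod{
		{Name: "postgresql-helper-1-cpv6d", Phase: "Running", DeletionTimestamp: &deleteAt},
		{Name: "postgresql-master-2-46j9k", Phase: "Running"},
	}
	fmt.Println("active replicas:", len(filterActive(pods)))
}
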
I0125 05:16:07.845379 4678 audit.go:45] 2017-01-25T05:16:07.84536576-05:00 AUDIT: id="1a3f2c25-5fcd-4d95-9fde-0ccd497fbdb6" response="200" I0125 05:16:07.845427 4678 panics.go:76] DELETE /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-helper: (3.003601ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:07.845758 4678 factory.go:140] Deleting deployment config "postgresql-helper" I0125 05:16:07.845790 4678 factory.go:265] Deployment config "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" has been deleted I0125 05:16:07.847739 4678 audit.go:125] 2017-01-25T05:16:07.847708194-05:00 AUDIT: id="47daf973-f1e3-48ad-81aa-b0fadb000b62" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master" I0125 05:16:07.848622 4678 audit.go:45] 2017-01-25T05:16:07.84861024-05:00 AUDIT: id="47daf973-f1e3-48ad-81aa-b0fadb000b62" response="200" I0125 05:16:07.848763 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master: (1.215311ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:07.849886 4678 audit.go:125] 2017-01-25T05:16:07.849860099-05:00 AUDIT: id="73eb092a-eb23-465e-92c9-84547ac0d04f" ip="172.18.7.222" method="PUT" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master" I0125 05:16:07.851763 4678 audit.go:45] 2017-01-25T05:16:07.851736893-05:00 AUDIT: id="73eb092a-eb23-465e-92c9-84547ac0d04f" response="200" I0125 05:16:07.851961 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master: (2.279268ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:07.852109 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:16:07.853085 4678 audit.go:125] 2017-01-25T05:16:07.853060299-05:00 AUDIT: id="fa53debc-fe24-405f-9bda-48cbc187c0f3" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:16:07.854980 4678 audit.go:45] 2017-01-25T05:16:07.854968308-05:00 AUDIT: id="fa53debc-fe24-405f-9bda-48cbc187c0f3" response="200" I0125 05:16:07.855188 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (2.295001ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:07.856380 4678 factory.go:154] Replication controller "postgresql-master-2" updated. 
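
Throughout this log every API request appears twice: an audit.go:125 line with the request metadata (id, ip, method, user, namespace, uri) and a later audit.go:45 line carrying the response code, joined by the shared id. A rough Go sketch for pairing the two when reading a capture like this one; the regular expression and the sample lines are assumptions about the line shape, not an official parser.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// kvRe matches key="value" pairs as they appear in the AUDIT lines.
var kvRe = regexp.MustCompile(`(\w+)="([^"]*)"`)

func parseAudit(line string) map[string]string {
	fields := map[string]string{}
	for _, m := range kvRe.FindAllStringSubmatch(line, -1) {
		fields[m[1]] = m[2]
	}
	return fields
}

func main() {
	lines := []string{
		`AUDIT: id="47daf973-f1e3-48ad-81aa-b0fadb000b62" ip="172.18.7.222" method="GET" user="system:admin" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master"`,
		`AUDIT: id="47daf973-f1e3-48ad-81aa-b0fadb000b62" response="200"`,
	}

	requests := map[string]map[string]string{} // audit id -> request fields
	for _, line := range lines {
		if !strings.Contains(line, "AUDIT:") {
			continue
		}
		f := parseAudit(line)
		if resp, ok := f["response"]; ok {
			if req := requests[f["id"]]; req != nil {
				fmt.Printf("%s %s by %s -> %s\n", req["method"], req["uri"], req["user"], resp)
			}
			continue
		}
		requests[f["id"]] = f
	}
}
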
I0125 05:16:07.856769 4678 audit.go:125] 2017-01-25T05:16:07.856731861-05:00 AUDIT: id="f5587d6c-beca-4cc8-a954-104f039b17c3" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1" I0125 05:16:07.856823 4678 audit.go:125] 2017-01-25T05:16:07.856796484-05:00 AUDIT: id="1c552485-1a35-4fa0-b37d-5d205fee290a" ip="172.18.7.222" method="POST" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:16:07.857234 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 1->0 I0125 05:16:07.857296 4678 replication_controller.go:585] Too many "extended-test-postgresql-replication-1-34bbd-xd4g8"/"postgresql-master-2" replicas, need 0, deleting 1 I0125 05:16:07.857312 4678 controller_utils.go:306] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 waiting on deletions for: [extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k] I0125 05:16:07.857326 4678 controller_utils.go:175] Setting expectations &controller.ControlleeExpectations{add:0, del:1, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2", timestamp:time.Time{sec:63620936167, nsec:857322999, loc:(*time.Location)(0xa2479e0)}} I0125 05:16:07.857368 4678 controller_utils.go:523] Controller postgresql-master-2 deleting pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k I0125 05:16:07.860453 4678 audit.go:125] 2017-01-25T05:16:07.860417819-05:00 AUDIT: id="1d35a40d-19c4-4a63-989b-a1bd435a42ae" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k" I0125 05:16:07.860624 4678 audit.go:45] 2017-01-25T05:16:07.860610754-05:00 AUDIT: id="f5587d6c-beca-4cc8-a954-104f039b17c3" response="200" I0125 05:16:07.860670 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-1: (4.183412ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:07.861190 4678 audit.go:45] 2017-01-25T05:16:07.861174754-05:00 AUDIT: id="1c552485-1a35-4fa0-b37d-5d205fee290a" response="201" I0125 05:16:07.861258 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (4.680855ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:07.861953 4678 deployment_util.go:784] Comparing pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k ready condition last transition time 2017-01-25 05:13:12 -0500 EST + minReadySeconds 0 with now 2017-01-25 05:16:07.86194029 -0500 EST. I0125 05:16:07.862379 4678 factory.go:181] Replication controller "postgresql-master-1" deleted. 
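
The "Setting expectations &controller.ControlleeExpectations{add:0, del:1, ...}" line, together with the later "Lowered expectations" entry, shows the bookkeeping the replication manager uses to avoid acting twice on the same observation: before issuing the pod delete it records how many creations and deletions it expects to observe, decrements the counters as the corresponding watch events arrive, and re-syncs only once the expectations are fulfilled. Below is a much-simplified sketch of that idea for a single controller key; the real implementation is keyed per controller and expires via a TTL, which is omitted here.

package main

import (
	"fmt"
	"sync/atomic"
)

// expectations tracks how many add/delete watch events a controller
// still expects before its cached view is considered up to date.
type expectations struct {
	add int64
	del int64
}

func (e *expectations) ExpectDeletions(n int64) { atomic.StoreInt64(&e.del, n) }
func (e *expectations) DeletionObserved()       { atomic.AddInt64(&e.del, -1) }

// Fulfilled reports whether every expected event has been seen, so the
// next sync can trust the local pod cache again.
func (e *expectations) Fulfilled() bool {
	return atomic.LoadInt64(&e.add) <= 0 && atomic.LoadInt64(&e.del) <= 0
}

func main() {
	var exp expectations
	exp.ExpectDeletions(1) // about to delete one surplus pod
	fmt.Println("fulfilled before delete event:", exp.Fulfilled())

	exp.DeletionObserved() // watch reports the pod's deletion
	fmt.Println("fulfilled after delete event: ", exp.Fulfilled())
}
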
I0125 05:16:07.862813 4678 replication_controller.go:660] Replication Controller has been deleted extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1 I0125 05:16:07.862827 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-1" (24.621µs) I0125 05:16:07.862938 4678 audit.go:125] 2017-01-25T05:16:07.862903823-05:00 AUDIT: id="31dd74ce-bfae-4f35-b10c-4506704bc3bd" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:16:07.864218 4678 audit.go:45] 2017-01-25T05:16:07.864187855-05:00 AUDIT: id="1d35a40d-19c4-4a63-989b-a1bd435a42ae" response="200" I0125 05:16:07.864296 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k: (6.561361ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:07.864881 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 1->1 (need 0), fullyLabeledReplicas 1->1, readyReplicas 1->1, availableReplicas 1->1, sequence No: 2->3 I0125 05:16:07.865124 4678 replication_controller.go:378] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11158 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11229 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc42a354ae8 Labels:map[app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:07.865274 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:16:37.862194617 -0500 EST, labels map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master]. I0125 05:16:07.865343 4678 controller_utils.go:320] Controller extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 received delete for pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k I0125 05:16:07.865353 4678 controller_utils.go:192] Lowered expectations &controller.ControlleeExpectations{add:0, del:0, key:"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2", timestamp:time.Time{sec:63620936167, nsec:857322999, loc:(*time.Location)(0xa2479e0)}} I0125 05:16:07.865402 4678 replica_set.go:320] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11158 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11229 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc42a354ae8 Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master] Annotations:map[openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} ] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:07.864881 4678 config.go:281] Setting pods for source api I0125 05:16:07.865489 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:16:37.862194617 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-2-46j9k", GenerateName:"postgresql-master-2-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k", UID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11229", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935982, nsec:474873911, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42e755e20), DeletionGracePeriodSeconds:(*int64)(0xc42a354ae8), Labels:map[string]string{"deployment":"postgresql-master-2", "deploymentconfig":"postgresql-master", "name":"postgresql-master", "app":"pg-replica-example"}, Annotations:map[string]string{"openshift.io/deployment-config.latest-version":"2", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-2\",\"uid\":\"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11137\"}}\n", "openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-2", "openshift.io/scc":"restricted", "openshift.io/generated-by":"OpenShiftNewApp"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc42e755f60), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), 
EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42adc2f30), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"newpass", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc42adc2fc0), ReadinessProbe:(*api.Probe)(0xc42adc2ff0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42adc3110), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc42a354e60), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc43ad7ae80), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, 
loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935992, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.2", StartTime:(*unversioned.Time)(0xc42d3be1c0), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-master", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(0xc42d3be200), Terminated:(*api.ContainerStateTerminated)(nil)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:true, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d"}}}}. I0125 05:16:07.865804 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-46j9k, ReplicaSet controller will avoid syncing I0125 05:16:07.865843 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-46j9k, job controller will avoid syncing I0125 05:16:07.865861 4678 daemoncontroller.go:332] Pod postgresql-master-2-46j9k updated. I0125 05:16:07.865896 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-46j9k, daemon set controller will avoid syncing I0125 05:16:07.865919 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-46j9k" I0125 05:16:07.865939 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-46j9k, PodDisruptionBudget controller will avoid syncing. 
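
The run of "No ReplicaSets / jobs / daemon sets / PodDisruptionBudgets found for pod postgresql-master-2-46j9k, ... will avoid syncing" lines is each controller's pod-update handler checking which of its objects select the changed pod and skipping the sync when none match. A small Go sketch of that label-selector lookup, assuming plain equality-based selectors; the controller names and selectors below are made up for illustration.

package main

import "fmt"

// selects reports whether every key/value in the selector is present
// on the pod's labels (equality-based selection only).
func selects(selector, podLabels map[string]string) bool {
	for k, v := range selector {
		if podLabels[k] != v {
			return false
		}
	}
	return true
}

func main() {
	podLabels := map[string]string{
		"app":              "pg-replica-example",
		"deployment":       "postgresql-master-2",
		"deploymentconfig": "postgresql-master",
		"name":             "postgresql-master",
	}

	controllers := map[string]map[string]string{
		"postgresql-master-2": {"deployment": "postgresql-master-2"},
		"docker-registry-1":   {"deploymentconfig": "docker-registry"},
	}

	matched := 0
	for name, sel := range controllers {
		if selects(sel, podLabels) {
			fmt.Println("queueing sync for", name)
			matched++
		}
	}
	if matched == 0 {
		fmt.Println("no controllers found for pod, will avoid syncing")
	}
}
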
I0125 05:16:07.865946 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-46j9k" I0125 05:16:07.866161 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-46j9k, StatefulSet controller will avoid syncing I0125 05:16:07.866815 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:07.866869 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:07.867172 4678 audit.go:45] 2017-01-25T05:16:07.867160178-05:00 AUDIT: id="31dd74ce-bfae-4f35-b10c-4506704bc3bd" response="200" I0125 05:16:07.867294 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (4.634253ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:07.867401 4678 event.go:217] Event(api.ObjectReference{Kind:"ReplicationController", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-2", UID:"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11226", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: postgresql-master-2-46j9k I0125 05:16:07.867546 4678 docker_manager.go:1536] Killing container "ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d postgresql-master extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k" with 30 second grace period I0125 05:16:07.868352 4678 audit.go:125] 2017-01-25T05:16:07.868320574-05:00 AUDIT: id="88860075-c403-4538-8b9d-09f2ba8b1532" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k" I0125 05:16:07.869046 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 5) I0125 05:16:07.869781 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:16:07.871734 4678 audit.go:125] 2017-01-25T05:16:07.871697691-05:00 AUDIT: id="8e10dc28-efd4-491d-825a-3e6461f43da5" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:16:07.872251 4678 audit.go:45] 2017-01-25T05:16:07.87223821-05:00 AUDIT: id="8e10dc28-efd4-491d-825a-3e6461f43da5" response="409" I0125 05:16:07.872292 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (809.583µs) 409 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:07.872849 4678 audit.go:45] 2017-01-25T05:16:07.872839116-05:00 AUDIT: id="88860075-c403-4538-8b9d-09f2ba8b1532" response="200" I0125 05:16:07.872904 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k: (4.820393ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:07.873112 4678 controller.go:294] Cannot update the status for 
"extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master": Operation cannot be fulfilled on deploymentconfigs "postgresql-master": the object has been modified; please apply your changes to the latest version and try again I0125 05:16:07.873124 4678 controller.go:393] Error syncing deployment config extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master: Operation cannot be fulfilled on deploymentconfigs "postgresql-master": the object has been modified; please apply your changes to the latest version and try again I0125 05:16:07.874094 4678 audit.go:125] 2017-01-25T05:16:07.874060802-05:00 AUDIT: id="08c58799-d4fd-4667-a2d2-9e5512f6a262" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k/status" I0125 05:16:07.875164 4678 audit.go:125] 2017-01-25T05:16:07.875130251-05:00 AUDIT: id="0a765621-fbde-4228-98e5-d54d9fa824c5" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:16:07.875360 4678 audit.go:125] 2017-01-25T05:16:07.87531711-05:00 AUDIT: id="5a4e90f6-48c3-43ab-baa3-decbbf2d057d" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:16:07.875583 4678 audit.go:125] 2017-01-25T05:16:07.87555055-05:00 AUDIT: id="a5885579-7e2d-4b8d-8948-6145728efb18" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:16:07.876394 4678 audit.go:125] 2017-01-25T05:16:07.876359897-05:00 AUDIT: id="abab5ab6-847d-4c89-ae39-e5d355c9ec93" ip="172.18.7.222" method="POST" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:16:07.877531 4678 audit.go:45] 2017-01-25T05:16:07.87751381-05:00 AUDIT: id="0a765621-fbde-4228-98e5-d54d9fa824c5" response="200" I0125 05:16:07.877590 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (10.029009ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:07.877844 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:16:07.878279 4678 audit.go:45] 2017-01-25T05:16:07.878265711-05:00 AUDIT: id="08c58799-d4fd-4667-a2d2-9e5512f6a262" response="200" I0125 05:16:07.878352 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k/status: (4.519073ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 
05:16:07.878364 4678 audit.go:45] 2017-01-25T05:16:07.878352952-05:00 AUDIT: id="5a4e90f6-48c3-43ab-baa3-decbbf2d057d" response="200" I0125 05:16:07.878470 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (3.400647ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:07.879528 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 5) I0125 05:16:07.879684 4678 replication_controller.go:378] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11229 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc42a354ae8 Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11232 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc429aa3270 Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:07.879769 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:16:37.862194617 -0500 EST, labels map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example]. 
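
The 409 responses above ('Operation cannot be fulfilled on deploymentconfigs "postgresql-master": the object has been modified; please apply your changes to the latest version and try again') are the API server's optimistic-concurrency check rejecting a write made with a stale resourceVersion; the controller then re-reads the object and tries again on its next pass, which is why the same status PUT succeeds shortly afterwards. The following is a generic get/mutate/update retry loop sketched in Go against a toy in-memory store; errConflict, getLatest and update are placeholders for illustration, not real client-go calls.

package main

import (
	"errors"
	"fmt"
)

var errConflict = errors.New("409: object has been modified, retry with the latest version")

type object struct {
	ResourceVersion int
	Replicas        int
}

// updateWithRetry re-reads the object and reapplies the mutation whenever
// the write is rejected with a conflict, up to a small retry budget.
func updateWithRetry(getLatest func() object, update func(object) error, mutate func(*object)) error {
	for attempt := 0; attempt < 5; attempt++ {
		obj := getLatest()
		mutate(&obj)
		err := update(obj)
		if err == nil {
			return nil
		}
		if !errors.Is(err, errConflict) {
			return err
		}
		fmt.Println("conflict on attempt", attempt+1, "- refetching and retrying")
	}
	return fmt.Errorf("giving up after repeated conflicts")
}

func main() {
	// A toy "server" whose stored resourceVersion advances under our feet once.
	stored := object{ResourceVersion: 11229, Replicas: 1}
	conflictOnce := true

	getLatest := func() object { return stored }
	update := func(o object) error {
		if conflictOnce {
			conflictOnce = false
			stored.ResourceVersion++ // someone else wrote first
			return errConflict
		}
		if o.ResourceVersion != stored.ResourceVersion {
			return errConflict
		}
		o.ResourceVersion++
		stored = o
		return nil
	}

	if err := updateWithRetry(getLatest, update, func(o *object) { o.Replicas = 0 }); err != nil {
		fmt.Println("update failed:", err)
		return
	}
	fmt.Println("scaled to", stored.Replicas, "at resourceVersion", stored.ResourceVersion)
}
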
I0125 05:16:07.879792 4678 status_manager.go:425] Status for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935992 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.17.0.2 StartTime:0xc429674340 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running:0xc43039ec40 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d}]} version:4 podName:postgresql-master-2-46j9k podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:16:07.879849 4678 status_manager.go:435] Pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" is terminated, but some containers are still running I0125 05:16:07.879834 4678 replica_set.go:320] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11229 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc42a354ae8 Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master] Annotations:map[openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11232 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc429aa3270 Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-2] 
Annotations:map[openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:07.879989 4678 config.go:281] Setting pods for source api I0125 05:16:07.879903 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:16:37.862194617 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-2-46j9k", GenerateName:"postgresql-master-2-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k", UID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11232", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935982, nsec:474873911, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc4353bb960), DeletionGracePeriodSeconds:(*int64)(0xc429aa3270), Labels:map[string]string{"deployment":"postgresql-master-2", "deploymentconfig":"postgresql-master", "name":"postgresql-master", "app":"pg-replica-example"}, Annotations:map[string]string{"openshift.io/scc":"restricted", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/deployment-config.latest-version":"2", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-2\",\"uid\":\"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11137\"}}\n", "openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-2"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc4353bbaa0), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), 
AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc427246300), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"newpass", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc427246540), ReadinessProbe:(*api.Probe)(0xc4272466f0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc427246840), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc429aa3ae0), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc42f39b900), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, 
LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935992, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.2", StartTime:(*unversioned.Time)(0xc4353bbd00), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-master", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(0xc4353bbd40), Terminated:(*api.ContainerStateTerminated)(nil)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:true, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d"}}}}. I0125 05:16:07.880217 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-46j9k, ReplicaSet controller will avoid syncing I0125 05:16:07.880246 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-46j9k, job controller will avoid syncing I0125 05:16:07.880258 4678 daemoncontroller.go:332] Pod postgresql-master-2-46j9k updated. I0125 05:16:07.880283 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-46j9k, daemon set controller will avoid syncing I0125 05:16:07.880302 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-46j9k" I0125 05:16:07.880319 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-46j9k, PodDisruptionBudget controller will avoid syncing. I0125 05:16:07.880328 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-46j9k" I0125 05:16:07.880393 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-46j9k, StatefulSet controller will avoid syncing I0125 05:16:07.881051 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:16:07.883341 4678 audit.go:45] 2017-01-25T05:16:07.883326407-05:00 AUDIT: id="a5885579-7e2d-4b8d-8948-6145728efb18" response="200" I0125 05:16:07.884513 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (17.89787ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:07.885246 4678 factory.go:154] Replication controller "postgresql-master-2" updated. 
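
Earlier in this sequence the kubelet logged killing the postgresql-master container "with 30 second grace period", and the status manager noted the pod "is terminated, but some containers are still running": the delete is requested at 05:16:07 but the DeletionTimestamp is stamped 30 seconds later, giving the container time to exit on SIGTERM before a hard kill. A stand-alone Go sketch of that SIGTERM-then-SIGKILL pattern; the sleep command and the 3-second grace period are stand-ins for the container process and the pod's 30-second setting.

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func main() {
	// Stand-in for a running container process.
	cmd := exec.Command("sleep", "300")
	if err := cmd.Start(); err != nil {
		fmt.Println("start:", err)
		return
	}

	gracePeriod := 3 * time.Second // the pod in the log uses 30s

	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	// Ask the process to stop, as the kubelet does when the pod is deleted.
	_ = cmd.Process.Signal(syscall.SIGTERM)

	select {
	case err := <-done:
		fmt.Println("exited within the grace period:", err)
	case <-time.After(gracePeriod):
		fmt.Println("grace period expired, sending SIGKILL")
		_ = cmd.Process.Kill()
		<-done
	}
}
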
I0125 05:16:07.886078 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (28.801611ms) I0125 05:16:07.886131 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k in state Running, deletion time 2017-01-25 05:16:37.862194617 -0500 EST I0125 05:16:07.886162 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 1->0 (need 0), fullyLabeledReplicas 1->0, readyReplicas 1->0, availableReplicas 1->0, sequence No: 2->3 I0125 05:16:07.886521 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 0->0 I0125 05:16:07.886622 4678 audit.go:45] 2017-01-25T05:16:07.88660898-05:00 AUDIT: id="abab5ab6-847d-4c89-ae39-e5d355c9ec93" response="201" I0125 05:16:07.886672 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (19.596331ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:07.888843 4678 audit.go:125] 2017-01-25T05:16:07.888809356-05:00 AUDIT: id="12fe14d3-1942-4b4f-a327-5e799371bd26" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:16:07.889599 4678 audit.go:125] 2017-01-25T05:16:07.889565281-05:00 AUDIT: id="eef6f7ce-f35b-4847-8f38-131d7e384018" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:16:07.890065 4678 audit.go:45] 2017-01-25T05:16:07.89005237-05:00 AUDIT: id="eef6f7ce-f35b-4847-8f38-131d7e384018" response="409" I0125 05:16:07.890113 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (2.694146ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:07.890481 4678 audit.go:45] 2017-01-25T05:16:07.890467559-05:00 AUDIT: id="12fe14d3-1942-4b4f-a327-5e799371bd26" response="200" I0125 05:16:07.890534 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (9.900904ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:07.891013 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:16:07.891082 4678 roundrobin.go:275] LoadBalancerRR: Removing endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master: I0125 05:16:07.891158 4678 proxier.go:631] Removing endpoints for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master:" I0125 05:16:07.891184 4678 proxier.go:804] Syncing iptables rules I0125 05:16:07.891194 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:07.907355 4678 
endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (41.340167ms) I0125 05:16:07.907439 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:07.907570 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:07.907596 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:07.907609 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:07.907639 4678 healthcheck.go:86] LB service health check mutation request Service: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master - 0 Endpoints [] I0125 05:16:07.907697 4678 healthcheck.go:89] Deleting endpoints map for service extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, all local endpoints gone I0125 05:16:07.907753 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:07.907777 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:07.907812 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:16:07.916556 4678 audit.go:125] 2017-01-25T05:16:07.916500157-05:00 AUDIT: id="1c905f55-7d4c-4934-b6e2-29d4ba3f3692" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:16:07.918453 4678 audit.go:125] 2017-01-25T05:16:07.918422382-05:00 AUDIT: id="07049314-a5f3-418f-b225-d73bfdd9f405" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:16:07.918991 4678 audit.go:45] 2017-01-25T05:16:07.91897849-05:00 AUDIT: id="1c905f55-7d4c-4934-b6e2-29d4ba3f3692" response="200" I0125 05:16:07.919279 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (28.638897ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:07.919596 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 1->0 (need 0), fullyLabeledReplicas 1->0, readyReplicas 1->0, availableReplicas 1->0, sequence No: 3->3 I0125 05:16:07.920574 4678 audit.go:45] 2017-01-25T05:16:07.920561358-05:00 AUDIT: id="07049314-a5f3-418f-b225-d73bfdd9f405" response="200" I0125 05:16:07.920633 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (10.506887ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:07.922061 4678 
endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:16:07.924148 4678 audit.go:125] 2017-01-25T05:16:07.924117181-05:00 AUDIT: id="ce575e0b-de38-4665-b2b6-5e0fa175c74e" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:16:07.926384 4678 audit.go:125] 2017-01-25T05:16:07.926348616-05:00 AUDIT: id="ea99543c-d2ff-4349-98a9-d3261fffad6e" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:16:07.926882 4678 audit.go:45] 2017-01-25T05:16:07.92686943-05:00 AUDIT: id="ea99543c-d2ff-4349-98a9-d3261fffad6e" response="200" I0125 05:16:07.926938 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (4.472711ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:07.927136 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (19.618138ms) I0125 05:16:07.927609 4678 audit.go:45] 2017-01-25T05:16:07.927596008-05:00 AUDIT: id="ce575e0b-de38-4665-b2b6-5e0fa175c74e" response="200" I0125 05:16:07.927806 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (6.821548ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:07.928135 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (42.025057ms) I0125 05:16:07.928233 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k in state Running, deletion time 2017-01-25 05:16:37.862194617 -0500 EST I0125 05:16:07.928267 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 1->0 (need 0), fullyLabeledReplicas 1->0, readyReplicas 1->0, availableReplicas 1->0, sequence No: 3->3 I0125 05:16:07.928937 4678 factory.go:154] Replication controller "postgresql-master-2" updated. I0125 05:16:07.928997 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. 
Desired pod count change: 0->0 I0125 05:16:07.929008 4678 replication_controller.go:338] Observed updated replica count for rc: postgresql-master-2, 1->0 I0125 05:16:07.930038 4678 audit.go:125] 2017-01-25T05:16:07.929998401-05:00 AUDIT: id="ea11288d-0f86-40fd-b1aa-9a04db46ac2d" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status" I0125 05:16:07.931260 4678 audit.go:125] 2017-01-25T05:16:07.931225877-05:00 AUDIT: id="260d1823-295b-496e-b29c-1cf71a9f3515" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:16:07.931729 4678 audit.go:45] 2017-01-25T05:16:07.931715709-05:00 AUDIT: id="260d1823-295b-496e-b29c-1cf71a9f3515" response="409" I0125 05:16:07.931777 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (2.470398ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:07.932483 4678 audit.go:45] 2017-01-25T05:16:07.932470303-05:00 AUDIT: id="ea11288d-0f86-40fd-b1aa-9a04db46ac2d" response="200" I0125 05:16:07.932599 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master/status: (2.838761ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:07.932960 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" (observed generation: 5) I0125 05:16:07.933176 4678 factory.go:122] Updating deployment config "postgresql-master" I0125 05:16:07.934035 4678 audit.go:125] 2017-01-25T05:16:07.934004412-05:00 AUDIT: id="cd13a412-a27b-4686-b65f-2754cef148a6" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:16:07.934860 4678 audit.go:45] 2017-01-25T05:16:07.934847123-05:00 AUDIT: id="cd13a412-a27b-4686-b65f-2754cef148a6" response="200" I0125 05:16:07.935041 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (2.776439ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:07.935267 4678 replication_controller_utils.go:58] Updating replica count for rc: extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2, replicas 0->0 (need 0), fullyLabeledReplicas 0->0, readyReplicas 0->0, availableReplicas 0->0, sequence No: 3->3 I0125 05:16:07.940312 4678 audit.go:125] 2017-01-25T05:16:07.940276289-05:00 AUDIT: id="85e2ba62-c03f-4a45-b136-35daa4b198be" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:replication-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status" I0125 05:16:07.942944 4678 helpers.go:101] Unable to get network stats from pid 10710: couldn't read network stats: failure opening /proc/10710/net/dev: open /proc/10710/net/dev: no such file or directory I0125 05:16:07.944593 4678 audit.go:45] 2017-01-25T05:16:07.944578503-05:00 AUDIT: id="85e2ba62-c03f-4a45-b136-35daa4b198be" response="200" I0125 05:16:07.945640 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2/status: (9.98381ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:replication-controller] 172.18.7.222:50846] I0125 05:16:07.946290 4678 factory.go:154] Replication controller "postgresql-master-2" updated. I0125 05:16:07.946690 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 0->0 I0125 05:16:07.946931 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (18.751205ms) I0125 05:16:07.947000 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k in state Running, deletion time 2017-01-25 05:16:37.862194617 -0500 EST I0125 05:16:07.947027 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (52.103µs) I0125 05:16:07.956830 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:07.983303 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:07.993452 4678 volume_manager.go:365] All volumes are attached and mounted for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:07.994858 4678 audit.go:125] 2017-01-25T05:16:07.994824592-05:00 AUDIT: id="e5c7c455-1e48-4105-8ce7-15efd87adf0e" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/registry-dockercfg-k7v0c" I0125 05:16:07.997311 4678 audit.go:45] 2017-01-25T05:16:07.997295941-05:00 AUDIT: id="e5c7c455-1e48-4105-8ce7-15efd87adf0e" response="200" I0125 05:16:07.997667 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/registry-dockercfg-k7v0c: (3.20898ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:07.998100 4678 docker_manager.go:1938] Found pod infra container for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:07.998181 4678 docker_manager.go:1951] Pod infra container looks good, keep it "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:07.998228 4678 docker_manager.go:1999] pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)" container "registry" exists as b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b I0125 05:16:07.998460 4678 docker_manager.go:2086] Got container changes for pod "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:f3e8c4070605ec0a325ddc790afad13d1f5e410da6899802c43cad4aaaabef59 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] 
ContainersToKeep:map[f3e8c4070605ec0a325ddc790afad13d1f5e410da6899802c43cad4aaaabef59:-1 b146209707ed0c5c63c3a291f2d06b718bd68e5548d3083b21da6c778ef7734b:0]} I0125 05:16:08.008473 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.029866 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.042421 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:08.049093 4678 docker_manager.go:1577] Container "ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d postgresql-master extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k" exited after 181.515347ms I0125 05:16:08.050080 4678 server.go:664] Event(api.ObjectReference{Kind:"Pod", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", Name:"postgresql-master-2-46j9k", UID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", APIVersion:"v1", ResourceVersion:"11140", FieldPath:"spec.containers{postgresql-master}"}): type: 'Normal' reason: 'Killing' Killing container with docker id ebd85b26ebba: Need to kill pod. I0125 05:16:08.051066 4678 audit.go:125] 2017-01-25T05:16:08.051013348-05:00 AUDIT: id="142aaa62-0c14-4c55-826c-9e4cfb1ddb2c" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:16:08.055314 4678 audit.go:45] 2017-01-25T05:16:08.055297215-05:00 AUDIT: id="142aaa62-0c14-4c55-826c-9e4cfb1ddb2c" response="201" I0125 05:16:08.055407 4678 panics.go:76] POST /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (4.796188ms) 201 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:08.058425 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:16:08.060117 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:08.060168 4678 docker_manager.go:1536] Killing container "e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k" with 30 second grace period I0125 05:16:08.076588 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:08.094129 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:08.108718 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] 
:KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j 
KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:08.108758 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:08.153187 4678 proxier.go:797] syncProxyRules took 261.996624ms I0125 05:16:08.153238 4678 proxier.go:566] OnEndpointsUpdate took 262.136014ms for 6 endpoints I0125 05:16:08.153284 4678 proxier.go:381] Received update notice: [] I0125 05:16:08.153329 4678 proxier.go:804] Syncing iptables rules I0125 05:16:08.153340 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:08.170764 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:08.180876 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.198462 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.227068 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.239456 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:08.251689 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:08.267390 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:08.281664 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:08.294052 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] 
:KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment 
default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:08.294086 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:08.306236 4678 proxier.go:797] syncProxyRules took 152.900112ms I0125 05:16:08.306264 4678 proxier.go:431] OnServiceUpdate took 152.965864ms for 4 services I0125 05:16:08.313952 4678 docker_manager.go:1577] Container "e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k" exited after 253.762151ms I0125 05:16:08.365763 4678 generic.go:145] GenericPLEG: b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017: running -> exited I0125 05:16:08.365800 4678 generic.go:145] GenericPLEG: daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d: running -> exited I0125 05:16:08.365807 4678 generic.go:145] GenericPLEG: daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650: running -> exited I0125 05:16:08.370632 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc4268fcdc0 Mounts:[{Name: Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/containers/postgresql/422ec933 Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~empty-dir/postgresql-helper-data Destination:/var/lib/pgsql/data Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: 
Source:/mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc4359df200 NetworkSettings:0xc42726d200} I0125 05:16:08.374566 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc430050580 Mounts:[] Config:0xc42c0dfe60 NetworkSettings:0xc42d8c5400} I0125 05:16:08.376084 4678 generic.go:342] PLEG: Write status for postgresql-helper-1-cpv6d/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-helper-1-cpv6d", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42a5b10a0), (*container.ContainerStatus)(0xc426224460)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: ) I0125 05:16:08.376155 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:08.376225 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017"} I0125 05:16:08.378283 4678 audit.go:125] 2017-01-25T05:16:08.378240125-05:00 AUDIT: id="7e02b29a-1d1b-45e3-95c9-cec1ef19d47d" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:16:08.379587 4678 audit.go:45] 2017-01-25T05:16:08.379572386-05:00 AUDIT: id="7e02b29a-1d1b-45e3-95c9-cec1ef19d47d" response="200" I0125 05:16:08.379697 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (1.772842ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:08.381779 4678 audit.go:125] 2017-01-25T05:16:08.381735347-05:00 AUDIT: id="889b09cb-520f-46e6-b8ad-64b722523eef" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/status" I0125 05:16:08.383155 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc426890dc0 Mounts:[{Name: Source:/tmp/openshift-extended-tests/persistent-volumes816894978/0000099920249 Destination:/var/lib/pgsql/data Driver: Mode: RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw Destination:/var/run/secrets/kubernetes.io/serviceaccount Driver: Mode:ro,Z RW:false Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/etc-hosts Destination:/etc/hosts Driver: Mode:Z RW:true Propagation:rprivate} {Name: Source:/mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/containers/postgresql-master/566678ec Destination:/dev/termination-log Driver: Mode:Z RW:true Propagation:rprivate}] Config:0xc42a664900 NetworkSettings:0xc425b66600} I0125 05:16:08.384877 4678 audit.go:45] 
2017-01-25T05:16:08.384861948-05:00 AUDIT: id="889b09cb-520f-46e6-b8ad-64b722523eef" response="200" I0125 05:16:08.384982 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d/status: (3.492943ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:08.385491 4678 config.go:281] Setting pods for source api I0125 05:16:08.385700 4678 status_manager.go:425] Status for pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620936168 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935922 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc426a5ce80 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql State:{Waiting: Running: Terminated:0xc4265e85b0} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ImageID:docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5 ContainerID:docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208}]} version:5 podName:postgresql-helper-1-cpv6d podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:16:08.385757 4678 status_manager.go:441] Removing Pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" from etcd I0125 05:16:08.386433 4678 replication_controller.go:378] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11214 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp:2017-01-25 05:16:36.827925649 -0500 EST DeletionGracePeriodSeconds:0xc42bcb5958 Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 
SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11240 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp:2017-01-25 05:16:36.827925649 -0500 EST DeletionGracePeriodSeconds:0xc428209cc8 Labels:map[deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral] Annotations:map[openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:08.386536 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:16:36.827925649 -0500 EST, labels map[deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1]. I0125 05:16:08.386621 4678 replication_controller.go:255] No controllers found for pod postgresql-helper-1-cpv6d, replication manager will avoid syncing I0125 05:16:08.386646 4678 replica_set.go:320] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11214 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp:2017-01-25 05:16:36.827925649 -0500 EST DeletionGracePeriodSeconds:0xc42bcb5958 Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper] Annotations:map[openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11240 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp:2017-01-25 05:16:36.827925649 -0500 EST DeletionGracePeriodSeconds:0xc428209cc8 Labels:map[deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1] 
Annotations:map[openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:08.386726 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:08.386713 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:16:36.827925649 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-helper-1-cpv6d", GenerateName:"postgresql-helper-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d", UID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11240", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:868420924, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42ec8c4c0), DeletionGracePeriodSeconds:(*int64)(0xc428209cc8), Labels:map[string]string{"deploymentconfig":"postgresql-helper", "name":"postgresql-helper", "app":"postgresql-ephemeral", "deployment":"postgresql-helper-1"}, Annotations:map[string]string{"openshift.io/scc":"restricted", "openshift.io/deployment-config.name":"postgresql-helper", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-helper-1\",\"uid\":\"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11028\"}}\n", "openshift.io/deployment.name":"postgresql-helper-1", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/deployment-config.latest-version":"1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-helper-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc428209f20), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), 
ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc4274e7f20), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql", Image:"centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_USER", Value:"", ValueFrom:(*api.EnvVarSource)(0xc42ec8c6c0)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"", ValueFrom:(*api.EnvVarSource)(0xc42ec8c760)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"sampledb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList{"memory":resource.Quantity{i:resource.int64Amount{value:536870912, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"", Format:"BinarySI"}}, Requests:api.ResourceList{"memory":resource.Quantity{i:resource.int64Amount{value:536870912, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"", Format:"BinarySI"}}}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-helper-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc426a501b0), ReadinessProbe:(*api.Probe)(0xc426a501e0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc426a50210), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc427b40350), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc43010dbc0), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", 
LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620936168, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc42ec8ca60), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc42639d180)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", ImageID:"docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", ContainerID:"docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208"}}}}. I0125 05:16:08.387046 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-cpv6d, ReplicaSet controller will avoid syncing I0125 05:16:08.387080 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-cpv6d, job controller will avoid syncing I0125 05:16:08.387099 4678 daemoncontroller.go:332] Pod postgresql-helper-1-cpv6d updated. I0125 05:16:08.387134 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-cpv6d, daemon set controller will avoid syncing I0125 05:16:08.387157 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-cpv6d" I0125 05:16:08.387180 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-cpv6d, PodDisruptionBudget controller will avoid syncing. 
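Note on the 409 responses above: the PUTs to .../replicationcontrollers/postgresql-master-2/status at 05:16:07.890 and 05:16:07.931 are rejected with 409 and are each followed by a fresh GET of the controller and a PUT that returns 200. That is the usual optimistic-concurrency pattern keyed on resourceVersion: a stale write is refused, the writer re-reads, reapplies its change, and retries. The sketch below is a minimal, self-contained Go illustration of that loop; the rc type, the in-memory store, and the getRC/updateRCStatus/setStatusReplicas helpers are hypothetical stand-ins, not the client API the controller actually uses.

```go
// Minimal sketch of the read-modify-write retry behind the 409 -> GET -> PUT 200
// sequence in the trace. All names here are illustrative stand-ins.
package main

import (
	"errors"
	"fmt"
	"sync"
)

type rc struct {
	Name            string
	ResourceVersion int
	StatusReplicas  int
}

var (
	mu    sync.Mutex
	store = rc{Name: "postgresql-master-2", ResourceVersion: 1, StatusReplicas: 1}
)

var errConflict = errors.New("409: resourceVersion conflict")

// getRC returns a copy of the stored object, like a GET on .../replicationcontrollers/<name>.
func getRC() rc {
	mu.Lock()
	defer mu.Unlock()
	return store
}

// updateRCStatus behaves like a PUT on .../<name>/status: it fails with a
// conflict unless the caller's copy carries the current resourceVersion.
func updateRCStatus(updated rc) error {
	mu.Lock()
	defer mu.Unlock()
	if updated.ResourceVersion != store.ResourceVersion {
		return errConflict
	}
	updated.ResourceVersion++
	store = updated
	return nil
}

// setStatusReplicas retries the read-modify-write until it lands on a fresh
// copy, mirroring the re-GET and successful PUT that follow each 409 above.
func setStatusReplicas(n, maxRetries int) error {
	for i := 0; i < maxRetries; i++ {
		cur := getRC()
		cur.StatusReplicas = n
		if err := updateRCStatus(cur); err == nil {
			return nil
		} else if !errors.Is(err, errConflict) {
			return err
		}
		// Conflict: another writer got there first; loop re-reads and retries.
	}
	return fmt.Errorf("gave up after %d conflicts", maxRetries)
}

func main() {
	stale := getRC() // caller's cached copy
	fresh := getRC()
	fresh.StatusReplicas = 1
	_ = updateRCStatus(fresh) // a concurrent writer bumps resourceVersion

	stale.StatusReplicas = 0
	fmt.Println("direct PUT with stale copy:", updateRCStatus(stale)) // conflict, like the 409s above

	fmt.Println("retry helper:", setStatusReplicas(0, 3)) // re-reads and succeeds
	fmt.Printf("final object: %+v\n", getRC())
}
```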
I0125 05:16:08.387186 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-cpv6d" I0125 05:16:08.387283 4678 audit.go:125] 2017-01-25T05:16:08.387251959-05:00 AUDIT: id="ad53deae-3b0f-42d9-941f-c69ca97dd80e" ip="172.18.7.222" method="DELETE" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d" I0125 05:16:08.387453 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-cpv6d, StatefulSet controller will avoid syncing I0125 05:16:08.387486 4678 factory.go:211] Replication controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" has been deleted I0125 05:16:08.389757 4678 audit.go:125] 2017-01-25T05:16:08.389717994-05:00 AUDIT: id="6aa5ecb9-4fdf-4942-ac3e-af1784be232a" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:16:08.390461 4678 config.go:281] Setting pods for source api I0125 05:16:08.390692 4678 audit.go:45] 2017-01-25T05:16:08.390674499-05:00 AUDIT: id="6aa5ecb9-4fdf-4942-ac3e-af1784be232a" response="200" I0125 05:16:08.390760 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (3.07739ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:08.391227 4678 replication_controller.go:378] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11240 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp:2017-01-25 05:16:36.827925649 -0500 EST DeletionGracePeriodSeconds:0xc428209cc8 Labels:map[app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper] Annotations:map[openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11241 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp:2017-01-25 05:16:06.827925649 -0500 EST DeletionGracePeriodSeconds:0xc4227fb028 Labels:map[name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1 
deploymentconfig:postgresql-helper] Annotations:map[openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:08.391313 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:16:06.827925649 -0500 EST, labels map[deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral]. I0125 05:16:08.391374 4678 replication_controller.go:255] No controllers found for pod postgresql-helper-1-cpv6d, replication manager will avoid syncing I0125 05:16:08.391388 4678 replica_set.go:320] Pod postgresql-helper-1-cpv6d updated, objectMeta {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11240 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp:2017-01-25 05:16:36.827925649 -0500 EST DeletionGracePeriodSeconds:0xc428209cc8 Labels:map[deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral deployment:postgresql-helper-1] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-helper-1-cpv6d GenerateName:postgresql-helper-1- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d UID:b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11241 Generation:0 CreationTimestamp:2017-01-25 05:12:02.868420924 -0500 EST DeletionTimestamp:2017-01-25 05:16:06.827925649 -0500 EST DeletionGracePeriodSeconds:0xc4227fb028 Labels:map[app:postgresql-ephemeral deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper] Annotations:map[openshift.io/deployment-config.latest-version:1 openshift.io/scc:restricted openshift.io/deployment-config.name:postgresql-helper kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-helper-1","uid":"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11028"}} 
openshift.io/deployment.name:postgresql-helper-1 openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:08.391588 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:08.391454 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:16:06.827925649 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-helper-1-cpv6d", GenerateName:"postgresql-helper-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d", UID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11241", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:868420924, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42da6a2a0), DeletionGracePeriodSeconds:(*int64)(0xc4227fb028), Labels:map[string]string{"deploymentconfig":"postgresql-helper", "name":"postgresql-helper", "app":"postgresql-ephemeral", "deployment":"postgresql-helper-1"}, Annotations:map[string]string{"openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/deployment-config.latest-version":"1", "openshift.io/scc":"restricted", "openshift.io/deployment-config.name":"postgresql-helper", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-helper-1\",\"uid\":\"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11028\"}}\n", "openshift.io/deployment.name":"postgresql-helper-1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-helper-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc4227fb190), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), 
AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42b5c7dd0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql", Image:"centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_USER", Value:"", ValueFrom:(*api.EnvVarSource)(0xc42da6a4e0)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"", ValueFrom:(*api.EnvVarSource)(0xc42da6a560)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"sampledb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList{"memory":resource.Quantity{i:resource.int64Amount{value:536870912, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"", Format:"BinarySI"}}, Requests:api.ResourceList{"memory":resource.Quantity{i:resource.int64Amount{value:536870912, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"", Format:"BinarySI"}}}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-helper-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc42b5c7fb0), ReadinessProbe:(*api.Probe)(0xc42a78a000), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42a78a030), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc4227fb440), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc427287f80), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620936168, nsec:0, 
loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc42da6a760), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc42b71e690)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", ImageID:"docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", ContainerID:"docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208"}}}}. I0125 05:16:08.391745 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-cpv6d, ReplicaSet controller will avoid syncing I0125 05:16:08.391768 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-cpv6d, job controller will avoid syncing I0125 05:16:08.391779 4678 daemoncontroller.go:332] Pod postgresql-helper-1-cpv6d updated. I0125 05:16:08.391812 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-cpv6d, daemon set controller will avoid syncing I0125 05:16:08.391829 4678 disruption.go:326] updatePod called on pod "postgresql-helper-1-cpv6d" I0125 05:16:08.391848 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-cpv6d, PodDisruptionBudget controller will avoid syncing. 
I0125 05:16:08.391854 4678 disruption.go:329] No matching pdb for pod "postgresql-helper-1-cpv6d"
I0125 05:16:08.391908 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-cpv6d, StatefulSet controller will avoid syncing
I0125 05:16:08.391926 4678 factory.go:211] Replication controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" has been deleted
I0125 05:16:08.392157 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 0 not ready: 0
I0125 05:16:08.393455 4678 audit.go:45] 2017-01-25T05:16:08.393441832-05:00 AUDIT: id="ad53deae-3b0f-42d9-941f-c69ca97dd80e" response="200"
I0125 05:16:08.393545 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d: (6.542545ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864]
I0125 05:16:08.393632 4678 config.go:281] Setting pods for source api
I0125 05:16:08.394589 4678 kubelet.go:1791] SyncLoop (REMOVE, "api"): "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)"
I0125 05:16:08.394629 4678 kubelet.go:1976] Failed to delete pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)", err: pod not found
I0125 05:16:08.394816 4678 audit.go:125] 2017-01-25T05:16:08.394783738-05:00 AUDIT: id="1800fc3e-c077-4890-843d-1b87ca3d052c" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper"
I0125 05:16:08.394951 4678 status_manager.go:443] Pod "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" fully terminated and removed from etcd
I0125 05:16:08.395112 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.deletePod)-fm, timestamp 2017-01-25 05:16:06.827925649 -0500 EST, labels map[deployment:postgresql-helper-1 deploymentconfig:postgresql-helper name:postgresql-helper app:postgresql-ephemeral].
I0125 05:16:08.395183 4678 replication_controller.go:255] No controllers found for pod postgresql-helper-1-cpv6d, replication manager will avoid syncing I0125 05:16:08.395309 4678 audit.go:45] 2017-01-25T05:16:08.395296786-05:00 AUDIT: id="1800fc3e-c077-4890-843d-1b87ca3d052c" response="200" I0125 05:16:08.395357 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (2.82816ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:08.395217 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.deletePod)-fm, timestamp 2017-01-25 05:16:06.827925649 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-helper-1-cpv6d", GenerateName:"postgresql-helper-1-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-helper-1-cpv6d", UID:"b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11242", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935922, nsec:868420924, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc427408320), DeletionGracePeriodSeconds:(*int64)(0xc429bdee90), Labels:map[string]string{"deployment":"postgresql-helper-1", "deploymentconfig":"postgresql-helper", "name":"postgresql-helper", "app":"postgresql-ephemeral"}, Annotations:map[string]string{"openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/deployment-config.latest-version":"1", "openshift.io/scc":"restricted", "openshift.io/deployment-config.name":"postgresql-helper", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-helper-1\",\"uid\":\"b3de835f-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11028\"}}\n", "openshift.io/deployment.name":"postgresql-helper-1"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-helper-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(0xc429bdeef0), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), 
AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc4294b4e10), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql", Image:"centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_USER", Value:"", ValueFrom:(*api.EnvVarSource)(0xc4274084c0)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"", ValueFrom:(*api.EnvVarSource)(0xc427408540)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"sampledb", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList{"memory":resource.Quantity{i:resource.int64Amount{value:536870912, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"", Format:"BinarySI"}}, Requests:api.ResourceList{"memory":resource.Quantity{i:resource.int64Amount{value:536870912, scale:0}, d:resource.infDecAmount{Dec:(*inf.Dec)(nil)}, s:"", Format:"BinarySI"}}}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-helper-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc4294b4f00), ReadinessProbe:(*api.Probe)(0xc4294b4f30), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc4294b4f60), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc429bdefe0), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc427d320c0), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, 
LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620936168, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935922, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc427408720), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc42d0567e0)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", ImageID:"docker-pullable://docker.io/centos/postgresql-95-centos7@sha256:02307edc54866b691dd83fc258c7ca66bc8a919e9ec4e5b8bc94ff108b1867f5", ContainerID:"docker://1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208"}}}}. I0125 05:16:08.395551 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-helper-1-cpv6d, ReplicaSet controller will avoid syncing I0125 05:16:08.395580 4678 jobcontroller.go:141] No jobs found for pod postgresql-helper-1-cpv6d, job controller will avoid syncing I0125 05:16:08.395590 4678 daemoncontroller.go:367] Pod postgresql-helper-1-cpv6d deleted. I0125 05:16:08.395611 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (8.344916ms) I0125 05:16:08.395619 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-helper-1-cpv6d, daemon set controller will avoid syncing I0125 05:16:08.395633 4678 disruption.go:355] deletePod called on pod "postgresql-helper-1-cpv6d" I0125 05:16:08.395652 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-helper-1-cpv6d, PodDisruptionBudget controller will avoid syncing. I0125 05:16:08.395658 4678 disruption.go:358] No matching pdb for pod "postgresql-helper-1-cpv6d" I0125 05:16:08.395699 4678 pet_set.go:210] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1-cpv6d deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.(*StatefulSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.deletePod)-fm. 
I0125 05:16:08.395718 4678 pet_set.go:239] No StatefulSets found for pod postgresql-helper-1-cpv6d, StatefulSet controller will avoid syncing
I0125 05:16:08.395734 4678 factory.go:211] Replication controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper-1" has been deleted
I0125 05:16:08.423873 4678 audit.go:125] 2017-01-25T05:16:08.423731525-05:00 AUDIT: id="f11e068e-5ba1-4a9e-b459-1b3c69a1ce1e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper"
I0125 05:16:08.432893 4678 audit.go:45] 2017-01-25T05:16:08.432864342-05:00 AUDIT: id="f11e068e-5ba1-4a9e-b459-1b3c69a1ce1e" response="200"
I0125 05:16:08.433002 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (37.095557ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846]
I0125 05:16:08.433420 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 0 not ready: 0
I0125 05:16:08.443131 4678 audit.go:125] 2017-01-25T05:16:08.443090473-05:00 AUDIT: id="978d1936-f7bf-45a8-9d6c-927a5ce714b9" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0"
I0125 05:16:08.444457 4678 audit.go:45] 2017-01-25T05:16:08.444439737-05:00 AUDIT: id="978d1936-f7bf-45a8-9d6c-927a5ce714b9" response="200"
I0125 05:16:08.445457 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (2.811077ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794]
I0125 05:16:08.446514 4678 audit.go:125] 2017-01-25T05:16:08.446289493-05:00 AUDIT: id="2c0b9c25-22a1-4ed3-b516-7671d09ca76d" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper"
I0125 05:16:08.453971 4678 docker_manager.go:389] Container inspect result: {ContainerJSONBase:0xc425e2f080 Mounts:[] Config:0xc42bf88a20 NetworkSettings:0xc42f43a200}
I0125 05:16:08.454884 4678 audit.go:45] 2017-01-25T05:16:08.454865996-05:00 AUDIT: id="2c0b9c25-22a1-4ed3-b516-7671d09ca76d" response="200"
I0125 05:16:08.454967 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (19.507006ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846]
I0125 05:16:08.455520 4678 config.go:99] Calling handler.OnEndpointsUpdate()
I0125 05:16:08.455750 4678 proxier.go:804] Syncing iptables rules
I0125 05:16:08.455773 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter]
I0125 05:16:08.472740 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints []
I0125 05:16:08.472809 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry]
I0125 05:16:08.472918 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints []
I0125 05:16:08.472936 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router]
I0125 05:16:08.472954 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router]
I0125 05:16:08.472969 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router]
I0125 05:16:08.472985 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints []
I0125 05:16:08.473395 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. (77.755025ms)
I0125 05:16:08.476923 4678 generic.go:342] PLEG: Write status for postgresql-master-2-46j9k/extended-test-postgresql-replication-1-34bbd-xd4g8: &container.PodStatus{ID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", Name:"postgresql-master-2-46j9k", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", IP:"", ContainerStatuses:[]*container.ContainerStatus{(*container.ContainerStatus)(0xc42a989b20), (*container.ContainerStatus)(0xc42a989ea0)}, SandboxStatuses:[]*runtime.PodSandboxStatus(nil)} (err: )
I0125 05:16:08.477324 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d"}
I0125 05:16:08.477406 4678 kubelet.go:1816] SyncLoop (PLEG): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)", event: &pleg.PodLifecycleEvent{ID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", Type:"ContainerDied", Data:"e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650"}
I0125 05:16:08.477572 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)"
I0125 05:16:08.479720 4678 audit.go:125] 2017-01-25T05:16:08.479618645-05:00 AUDIT: id="b10543e6-cf98-4374-869b-aaeaa3392dce" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k"
I0125 05:16:08.482182 4678 audit.go:125] 2017-01-25T05:16:08.482079827-05:00 AUDIT: id="f8708b9b-b71e-4b23-bde5-0faa8f0e5426" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper"
I0125 05:16:08.486658 4678 audit.go:45] 2017-01-25T05:16:08.486639748-05:00 AUDIT: id="b10543e6-cf98-4374-869b-aaeaa3392dce" response="200"
I0125 05:16:08.487070 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k: (7.788566ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864]
I0125 05:16:08.488676 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat]
I0125 05:16:08.505978 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES]
I0125
05:16:08.521612 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.530410 4678 audit.go:125] 2017-01-25T05:16:08.530319942-05:00 AUDIT: id="3e494afd-52c7-4c82-a8ee-9c5346201f7a" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k/status" I0125 05:16:08.532933 4678 audit.go:45] 2017-01-25T05:16:08.532906462-05:00 AUDIT: id="f8708b9b-b71e-4b23-bde5-0faa8f0e5426" response="200" I0125 05:16:08.533053 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (59.023185ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:08.535122 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.549684 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper, ready: 0 not ready: 0 I0125 05:16:08.563564 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:08.572700 4678 audit.go:45] 2017-01-25T05:16:08.572662166-05:00 AUDIT: id="3e494afd-52c7-4c82-a8ee-9c5346201f7a" response="200" I0125 05:16:08.573004 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k/status: (43.049577ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:08.577905 4678 replication_controller.go:378] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11232 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc429aa3270 Labels:map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master] Annotations:map[openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} ] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11244 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc42761f8f8 
Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp] OwnerReferences:[] Finalizers:[] ClusterName:}.
I0125 05:16:08.578173 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:16:37.862194617 -0500 EST, labels map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example].
I0125 05:16:08.578388 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k in state Running, deletion time 2017-01-25 05:16:37.862194617 -0500 EST
I0125 05:16:08.578424 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (79.121µs)
I0125 05:16:08.578801 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-46j9k, job controller will avoid syncing
I0125 05:16:08.578830 4678 daemoncontroller.go:332] Pod postgresql-master-2-46j9k updated.
I0125 05:16:08.578948 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-46j9k, daemon set controller will avoid syncing
I0125 05:16:08.578990 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-46j9k"
I0125 05:16:08.579063 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-46j9k, PodDisruptionBudget controller will avoid syncing.
I0125 05:16:08.579072 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-46j9k" I0125 05:16:08.579557 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-46j9k, StatefulSet controller will avoid syncing I0125 05:16:08.579955 4678 replica_set.go:320] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11232 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc429aa3270 Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11244 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc42761f8f8 Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} ] OwnerReferences:[] Finalizers:[] ClusterName:}. 
I0125 05:16:08.580136 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:16:37.862194617 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-2-46j9k", GenerateName:"postgresql-master-2-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k", UID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11244", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935982, nsec:474873911, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42695fe80), DeletionGracePeriodSeconds:(*int64)(0xc42761f8f8), Labels:map[string]string{"deploymentconfig":"postgresql-master", "name":"postgresql-master", "app":"pg-replica-example", "deployment":"postgresql-master-2"}, Annotations:map[string]string{"kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-2\",\"uid\":\"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11137\"}}\n", "openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-2", "openshift.io/scc":"restricted", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/deployment-config.latest-version":"2"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc426938160), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42bad8660), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), 
RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"newpass", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc42bad86f0), ReadinessProbe:(*api.Probe)(0xc42bad8720), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42bad8750), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc42761fd60), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc425c4b680), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620936168, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql-master]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, 
nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc426938cc0), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-master", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc4226d7ab0)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d"}}}}. I0125 05:16:08.580715 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-46j9k, ReplicaSet controller will avoid syncing I0125 05:16:08.581965 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:08.594477 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/secret/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") from pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:16:08.594581 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/empty-dir/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-postgresql-helper-data" (spec.Name: "postgresql-helper-data") from pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). 
I0125 05:16:08.595328 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:08.609885 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:08.616418 4678 config.go:281] Setting pods for source api I0125 05:16:08.619942 4678 util.go:340] Tearing down volume default-token-0g2nw for pod b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:16:08.620451 4678 mount_linux.go:147] Unmounting /mnt/openshift-xfs-vol-dir/pods/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:16:08.620689 4678 kubelet.go:1794] SyncLoop (RECONCILE, "api"): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:08.633751 4678 status_manager.go:425] Status for pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" updated successfully: {status:{Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason: Message:} {Type:Ready Status:False LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620936168 nsec:0 loc:0xa2479e0}} Reason:ContainersNotReady Message:containers with unready status: [postgresql-master]} {Type:PodScheduled Status:True LastProbeTime:{Time:{sec:0 nsec:0 loc:0x923f100}} LastTransitionTime:{Time:{sec:63620935982 nsec:0 loc:0xa2479e0}} Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP: StartTime:0xc429674340 InitContainerStatuses:[] ContainerStatuses:[{Name:postgresql-master State:{Waiting: Running: Terminated:0xc42998a540} LastTerminationState:{Waiting: Running: Terminated:} Ready:false RestartCount:0 Image:centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ImageID:docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389 ContainerID:docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d}]} version:5 podName:postgresql-master-2-46j9k podNamespace:extended-test-postgresql-replication-1-34bbd-xd4g8} I0125 05:16:08.633887 4678 status_manager.go:441] Removing Pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" from etcd I0125 05:16:08.636270 4678 audit.go:125] 2017-01-25T05:16:08.63619448-05:00 AUDIT: id="b19c8539-d320-4b7e-88ef-e0eb0817c093" ip="172.18.7.222" method="DELETE" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k" I0125 05:16:08.640190 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (OuterVolumeSpecName: "default-token-0g2nw") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "default-token-0g2nw". 
PluginName "kubernetes.io/secret", VolumeGidValue "" I0125 05:16:08.654143 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A 
KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:08.654405 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:08.673093 4678 audit.go:125] 2017-01-25T05:16:08.673016182-05:00 AUDIT: id="c9c827db-37ea-4b87-b34d-8d81df75e9c2" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:16:08.674646 4678 audit.go:45] 2017-01-25T05:16:08.674626859-05:00 AUDIT: id="c9c827db-37ea-4b87-b34d-8d81df75e9c2" response="200" I0125 05:16:08.674752 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (100.430729ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:08.695052 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. 
(221.595016ms) I0125 05:16:08.720039 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094-postgresql-helper-data" (OuterVolumeSpecName: "postgresql-helper-data") pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "postgresql-helper-data". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" I0125 05:16:08.722097 4678 audit.go:125] 2017-01-25T05:16:08.722039385-05:00 AUDIT: id="44789a8e-072d-4cbb-b4da-e6676a99b604" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:16:08.725688 4678 audit.go:45] 2017-01-25T05:16:08.725666815-05:00 AUDIT: id="44789a8e-072d-4cbb-b4da-e6676a99b604" response="200" I0125 05:16:08.725807 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (88.467084ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:08.727290 4678 config.go:281] Setting pods for source api I0125 05:16:08.727582 4678 replication_controller.go:378] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11244 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc42761f8f8 Labels:map[deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example] Annotations:map[kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11245 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:07.862194617 -0500 EST DeletionGracePeriodSeconds:0xc427820e58 Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:08.728048 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:16:07.862194617 -0500 EST, labels map[name:postgresql-master app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master]. I0125 05:16:08.729289 4678 controller_utils.go:718] Ignoring inactive pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k in state Running, deletion time 2017-01-25 05:16:07.862194617 -0500 EST I0125 05:16:08.729333 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (91.929µs) I0125 05:16:08.729380 4678 replica_set.go:320] Pod postgresql-master-2-46j9k updated, objectMeta {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11244 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:37.862194617 -0500 EST DeletionGracePeriodSeconds:0xc42761f8f8 Labels:map[app:pg-replica-example deployment:postgresql-master-2 deploymentconfig:postgresql-master name:postgresql-master] Annotations:map[openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-master-2-46j9k GenerateName:postgresql-master-2- Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k UID:daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11245 Generation:0 CreationTimestamp:2017-01-25 05:13:02.474873911 -0500 EST DeletionTimestamp:2017-01-25 05:16:07.862194617 -0500 EST DeletionGracePeriodSeconds:0xc427820e58 Labels:map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-2] Annotations:map[openshift.io/scc:restricted openshift.io/generated-by:OpenShiftNewApp openshift.io/deployment-config.latest-version:2 
kubernetes.io/created-by:{"kind":"SerializedReference","apiVersion":"v1","reference":{"kind":"ReplicationController","namespace":"extended-test-postgresql-replication-1-34bbd-xd4g8","name":"postgresql-master-2","uid":"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094","apiVersion":"v1","resourceVersion":"11137"}} openshift.io/deployment-config.name:postgresql-master openshift.io/deployment.name:postgresql-master-2] OwnerReferences:[] Finalizers:[] ClusterName:}. I0125 05:16:08.729689 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:16:07.862194617 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-2-46j9k", GenerateName:"postgresql-master-2-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k", UID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11245", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935982, nsec:474873911, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42f55a2e0), DeletionGracePeriodSeconds:(*int64)(0xc427820e58), Labels:map[string]string{"deploymentconfig":"postgresql-master", "name":"postgresql-master", "app":"pg-replica-example", "deployment":"postgresql-master-2"}, Annotations:map[string]string{"openshift.io/scc":"restricted", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/deployment-config.latest-version":"2", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-2\",\"uid\":\"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11137\"}}\n", "openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-2"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc42f55a4c0), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), 
EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc42964d5f0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"newpass", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc42964d6b0), ReadinessProbe:(*api.Probe)(0xc42964d6e0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc42964d710), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc4278216d0), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc4247178c0), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, 
loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620936168, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", Message:"containers with unready status: [postgresql-master]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc42f55a900), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-master", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc422d95340)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d"}}}}. I0125 05:16:08.731627 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-46j9k, ReplicaSet controller will avoid syncing I0125 05:16:08.731685 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-46j9k, job controller will avoid syncing I0125 05:16:08.731713 4678 daemoncontroller.go:332] Pod postgresql-master-2-46j9k updated. I0125 05:16:08.731765 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-46j9k, daemon set controller will avoid syncing I0125 05:16:08.731802 4678 disruption.go:326] updatePod called on pod "postgresql-master-2-46j9k" I0125 05:16:08.731832 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-46j9k, PodDisruptionBudget controller will avoid syncing. 
I0125 05:16:08.731839 4678 disruption.go:329] No matching pdb for pod "postgresql-master-2-46j9k" I0125 05:16:08.731978 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-46j9k, StatefulSet controller will avoid syncing I0125 05:16:08.732329 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:08.733639 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:16:08.744902 4678 audit.go:45] 2017-01-25T05:16:08.744764513-05:00 AUDIT: id="b19c8539-d320-4b7e-88ef-e0eb0817c093" response="200" I0125 05:16:08.745169 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k: (109.305328ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:08.747175 4678 audit.go:125] 2017-01-25T05:16:08.747126885-05:00 AUDIT: id="f72fd568-0b52-4760-bd8e-7e10c5436c22" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:16:08.748224 4678 audit.go:45] 2017-01-25T05:16:08.748190737-05:00 AUDIT: id="f72fd568-0b52-4760-bd8e-7e10c5436c22" response="200" I0125 05:16:08.748301 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (11.592012ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:08.749092 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.deletePod)-fm, timestamp 2017-01-25 05:16:07.862194617 -0500 EST, labels map[deploymentconfig:postgresql-master name:postgresql-master app:pg-replica-example deployment:postgresql-master-2]. 
I0125 05:16:08.749296 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (50.086µs) I0125 05:16:08.749336 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.deletePod)-fm, timestamp 2017-01-25 05:16:07.862194617 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-master-2-46j9k", GenerateName:"postgresql-master-2-", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-master-2-46j9k", UID:"daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11246", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935982, nsec:474873911, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc4265943e0), DeletionGracePeriodSeconds:(*int64)(0xc42c0dc800), Labels:map[string]string{"deploymentconfig":"postgresql-master", "name":"postgresql-master", "app":"pg-replica-example", "deployment":"postgresql-master-2"}, Annotations:map[string]string{"openshift.io/deployment-config.name":"postgresql-master", "openshift.io/deployment.name":"postgresql-master-2", "openshift.io/scc":"restricted", "openshift.io/generated-by":"OpenShiftNewApp", "openshift.io/deployment-config.latest-version":"2", "kubernetes.io/created-by":"{\"kind\":\"SerializedReference\",\"apiVersion\":\"v1\",\"reference\":{\"kind\":\"ReplicationController\",\"namespace\":\"extended-test-postgresql-replication-1-34bbd-xd4g8\",\"name\":\"postgresql-master-2\",\"uid\":\"d712cbe4-e2e6-11e6-a4b0-0e6a5cbf0094\",\"apiVersion\":\"v1\",\"resourceVersion\":\"11137\"}}\n"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"postgresql-data", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(nil), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(0xc426594620), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}, api.Volume{Name:"default-token-0g2nw", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), 
GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc4306532f0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"postgresql-master", Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", Command:[]string(nil), Args:[]string{"run-postgresql-master"}, WorkingDir:"", Ports:[]api.ContainerPort{api.ContainerPort{Name:"", HostPort:0, ContainerPort:5432, Protocol:"TCP", HostIP:""}}, Env:[]api.EnvVar{api.EnvVar{Name:"POSTGRESQL_MASTER_USER", Value:"master", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_MASTER_PASSWORD", Value:"qcoktIqkwDX8", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_USER", Value:"user", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_PASSWORD", Value:"IbyV1wgYrrMd", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_DATABASE", Value:"userdb", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"POSTGRESQL_ADMIN_PASSWORD", Value:"newpass", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"postgresql-data", ReadOnly:false, MountPath:"/var/lib/pgsql/data", SubPath:""}, api.VolumeMount{Name:"default-token-0g2nw", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(0xc430653380), ReadinessProbe:(*api.Probe)(0xc4306533b0), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc4306533e0), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc42c0dca20), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"default", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc4241ed340), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"default-dockercfg-03n02"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Running", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620936168, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"ContainersNotReady", 
Message:"containers with unready status: [postgresql-master]"}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935982, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"", StartTime:(*unversioned.Time)(0xc426594980), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"postgresql-master", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc421e89f80)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ImageID:"docker-pullable://docker.io/centos/postgresql-94-centos7@sha256:e6fd9104a7febd6dcff31bf427f46ddb434e6041b69faff51c1313b526645389", ContainerID:"docker://ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d"}}}}. I0125 05:16:08.749855 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-master-2-46j9k, ReplicaSet controller will avoid syncing I0125 05:16:08.749903 4678 jobcontroller.go:141] No jobs found for pod postgresql-master-2-46j9k, job controller will avoid syncing I0125 05:16:08.749924 4678 daemoncontroller.go:367] Pod postgresql-master-2-46j9k deleted. I0125 05:16:08.749981 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-master-2-46j9k, daemon set controller will avoid syncing I0125 05:16:08.750004 4678 disruption.go:355] deletePod called on pod "postgresql-master-2-46j9k" I0125 05:16:08.750035 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-master-2-46j9k, PodDisruptionBudget controller will avoid syncing. I0125 05:16:08.750042 4678 disruption.go:358] No matching pdb for pod "postgresql-master-2-46j9k" I0125 05:16:08.750131 4678 pet_set.go:210] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.(*StatefulSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.deletePod)-fm. I0125 05:16:08.750158 4678 pet_set.go:239] No StatefulSets found for pod postgresql-master-2-46j9k, StatefulSet controller will avoid syncing I0125 05:16:08.750835 4678 config.go:281] Setting pods for source api I0125 05:16:08.751944 4678 kubelet.go:1791] SyncLoop (REMOVE, "api"): "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:08.752043 4678 kubelet_pods.go:785] Killing unwanted pod "postgresql-master-2-46j9k" I0125 05:16:08.752184 4678 docker_manager.go:1536] Killing container "ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d postgresql-master extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k" with 0 second grace period I0125 05:16:08.755556 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. 
(176.362952ms) I0125 05:16:08.757284 4678 proxier.go:797] syncProxyRules took 301.532746ms I0125 05:16:08.757305 4678 proxier.go:566] OnEndpointsUpdate took 301.674793ms for 6 endpoints I0125 05:16:08.758109 4678 proxier.go:381] Received update notice: [] I0125 05:16:08.758416 4678 proxier.go:804] Syncing iptables rules I0125 05:16:08.758431 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:08.778412 4678 docker_manager.go:1577] Container "ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d postgresql-master extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k" exited after 26.182333ms W0125 05:16:08.778465 4678 docker_manager.go:1583] No ref for pod '"ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d postgresql-master extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k"' I0125 05:16:08.778751 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:08.795131 4678 status_manager.go:443] Pod "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" fully terminated and removed from etcd I0125 05:16:08.795450 4678 docker_manager.go:1459] Calling network plugin kubernetes.io/no-op to tear down pod for postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:16:08.804354 4678 docker_manager.go:1536] Killing container "e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k" with 0 second grace period I0125 05:16:08.804637 4678 audit.go:125] 2017-01-25T05:16:08.804591979-05:00 AUDIT: id="15350f07-e137-4280-9733-c7c0d63da8d7" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:16:08.806315 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.816480 4678 docker_manager.go:1577] Container "e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k" exited after 12.102335ms W0125 05:16:08.816521 4678 docker_manager.go:1583] No ref for pod '"e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650 extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2-46j9k"' I0125 05:16:08.816788 4678 audit.go:45] 2017-01-25T05:16:08.816769151-05:00 AUDIT: id="15350f07-e137-4280-9733-c7c0d63da8d7" response="200" I0125 05:16:08.816918 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (60.931392ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:08.817460 4678 endpoints_controller.go:495] Update endpoints for extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master, ready: 0 not ready: 0 I0125 05:16:08.820484 4678 audit.go:125] 2017-01-25T05:16:08.820430976-05:00 AUDIT: id="dbc3b8d7-6b62-4789-b5f4-af2fd2ea6581" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:16:08.822798 4678 audit.go:45] 2017-01-25T05:16:08.822781725-05:00 AUDIT: id="dbc3b8d7-6b62-4789-b5f4-af2fd2ea6581" response="200" I0125 05:16:08.822855 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (4.806783ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:08.823258 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (67.66249ms) I0125 05:16:08.825157 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.840253 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.853881 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:08.867934 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:08.874289 4678 audit.go:125] 2017-01-25T05:16:08.874222297-05:00 AUDIT: id="e736ea08-a5a5-4448-8021-27b799ee41bb" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master" I0125 05:16:08.878346 4678 audit.go:45] 2017-01-25T05:16:08.878329736-05:00 AUDIT: id="e736ea08-a5a5-4448-8021-27b799ee41bb" response="200" I0125 05:16:08.878661 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master: (4.836998ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:08.880773 4678 audit.go:125] 2017-01-25T05:16:08.880737527-05:00 AUDIT: id="51938723-1d8d-45a0-a907-9628cb4b01db" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-master" I0125 05:16:08.882067 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:08.887700 4678 audit.go:45] 2017-01-25T05:16:08.887671484-05:00 AUDIT: id="51938723-1d8d-45a0-a907-9628cb4b01db" response="200" I0125 05:16:08.888422 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-master: (7.92413ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:08.890455 4678 audit.go:125] 2017-01-25T05:16:08.890419882-05:00 AUDIT: id="c35bf520-1c74-4fde-8c42-b6abc3a22bff" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:16:08.892145 4678 audit.go:45] 2017-01-25T05:16:08.892130785-05:00 AUDIT: id="c35bf520-1c74-4fde-8c42-b6abc3a22bff" response="200" I0125 05:16:08.892564 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (2.436114ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:08.893941 4678 audit.go:125] 2017-01-25T05:16:08.893899583-05:00 AUDIT: id="3313c940-38d0-495b-950a-2aced5667697" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:16:08.895406 4678 audit.go:45] 2017-01-25T05:16:08.895391903-05:00 AUDIT: id="3313c940-38d0-495b-950a-2aced5667697" response="200" I0125 05:16:08.895919 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (2.295167ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:08.897510 4678 audit.go:125] 2017-01-25T05:16:08.897474585-05:00 AUDIT: id="7c42db53-eea2-4313-99a8-f5195ab93331" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:16:08.897582 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:08.906388 4678 audit.go:45] 2017-01-25T05:16:08.906372752-05:00 AUDIT: id="7c42db53-eea2-4313-99a8-f5195ab93331" response="200" I0125 05:16:08.906927 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (9.682016ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:08.908747 4678 audit.go:125] 2017-01-25T05:16:08.908714442-05:00 AUDIT: id="328fc2d0-63a3-4a14-af1f-dda7c17abb81" ip="172.18.7.222" method="PUT" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:16:08.913078 4678 audit.go:45] 2017-01-25T05:16:08.913064196-05:00 AUDIT: id="328fc2d0-63a3-4a14-af1f-dda7c17abb81" response="200" I0125 05:16:08.914340 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (5.907817ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:08.914886 4678 replication_controller.go:322] Observed updated replication controller postgresql-master-2. Desired pod count change: 0->0 I0125 05:16:08.914957 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (33.087µs) I0125 05:16:08.915304 4678 factory.go:154] Replication controller "postgresql-master-2" updated. 
I0125 05:16:08.917922 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment 
default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:08.917949 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:08.926348 4678 audit.go:125] 2017-01-25T05:16:08.926291361-05:00 AUDIT: id="0ea5442f-c2ed-4f51-bcb1-1105a930a9d7" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:16:08.928256 4678 audit.go:45] 2017-01-25T05:16:08.928228083-05:00 AUDIT: id="0ea5442f-c2ed-4f51-bcb1-1105a930a9d7" response="200" I0125 05:16:08.928651 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (2.722337ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:08.930044 4678 audit.go:125] 2017-01-25T05:16:08.930010453-05:00 AUDIT: id="4b38c5f8-9f2f-46b5-a7a9-e6717e1460fa" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2" I0125 05:16:08.933856 4678 audit.go:45] 2017-01-25T05:16:08.933845294-05:00 AUDIT: id="4b38c5f8-9f2f-46b5-a7a9-e6717e1460fa" response="200" I0125 05:16:08.933902 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-master-2: (4.111569ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:08.935139 4678 audit.go:125] 2017-01-25T05:16:08.935116322-05:00 AUDIT: id="7e0ee26e-28e8-4a35-a64c-704edba9c80f" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master" I0125 05:16:08.936116 4678 factory.go:181] Replication controller "postgresql-master-2" deleted. I0125 05:16:08.936412 4678 replication_controller.go:660] Replication Controller has been deleted extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2 I0125 05:16:08.936427 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master-2" (22.696µs) I0125 05:16:08.938666 4678 audit.go:45] 2017-01-25T05:16:08.938656315-05:00 AUDIT: id="7e0ee26e-28e8-4a35-a64c-704edba9c80f" response="200" I0125 05:16:08.938703 4678 panics.go:76] DELETE /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-master: (3.753134ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:08.939045 4678 factory.go:140] Deleting deployment config "postgresql-master" I0125 05:16:08.939066 4678 factory.go:265] Deployment config "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" has been deleted I0125 05:16:08.942037 4678 proxier.go:797] syncProxyRules took 183.619122ms I0125 05:16:08.942055 4678 proxier.go:431] OnServiceUpdate took 183.930819ms for 4 services I0125 05:16:08.942088 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:16:08.942222 4678 proxier.go:804] Syncing iptables rules I0125 05:16:08.942237 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:08.951312 4678 audit.go:125] 2017-01-25T05:16:08.95121606-05:00 AUDIT: id="3e8157bc-c823-45b6-a7be-8bfbf1cfb627" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave" I0125 05:16:08.953858 4678 audit.go:45] 2017-01-25T05:16:08.953843492-05:00 AUDIT: id="3e8157bc-c823-45b6-a7be-8bfbf1cfb627" response="200" I0125 05:16:08.954218 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave: (12.042173ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:08.955619 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:08.955644 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:08.955655 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:16:08.955665 4678 healthcheck.go:86] 
LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:08.955675 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:08.955680 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:08.955685 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:08.956529 4678 audit.go:125] 2017-01-25T05:16:08.956489252-05:00 AUDIT: id="a3f63c34-08e1-4057-aa59-707f233ad47c" ip="172.18.7.222" method="PUT" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave" I0125 05:16:08.960212 4678 audit.go:45] 2017-01-25T05:16:08.960185146-05:00 AUDIT: id="a3f63c34-08e1-4057-aa59-707f233ad47c" response="200" I0125 05:16:08.960473 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave: (4.265159ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:08.961021 4678 factory.go:122] Updating deployment config "postgresql-slave" I0125 05:16:08.962083 4678 audit.go:125] 2017-01-25T05:16:08.96205422-05:00 AUDIT: id="1534e402-8ec4-464b-a8eb-7f2184184d98" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status" I0125 05:16:08.964539 4678 audit.go:45] 2017-01-25T05:16:08.964527337-05:00 AUDIT: id="1534e402-8ec4-464b-a8eb-7f2184184d98" response="200" I0125 05:16:08.964639 4678 panics.go:76] PUT /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave/status: (2.778201ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:08.965055 4678 controller.go:297] Updated the status for "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" (observed generation: 3) I0125 05:16:08.965283 4678 factory.go:122] Updating deployment config "postgresql-slave" I0125 05:16:08.969856 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:08.983800 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:08.994827 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:09.005141 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:09.015123 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:09.024773 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:09.032582 4678 audit.go:125] 2017-01-25T05:16:09.032526837-05:00 AUDIT: id="da49a88b-a5c0-42a8-b6b2-36ba7c145d42" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:16:09.033224 4678 audit.go:45] 2017-01-25T05:16:09.033194595-05:00 AUDIT: 
id="da49a88b-a5c0-42a8-b6b2-36ba7c145d42" response="200" I0125 05:16:09.033640 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.47678ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:09.038235 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:09.057412 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:09.073213 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 
172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:09.073254 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:09.090191 4678 proxier.go:797] syncProxyRules took 147.966464ms I0125 05:16:09.090234 4678 proxier.go:566] OnEndpointsUpdate took 148.085633ms for 6 endpoints I0125 05:16:09.090283 4678 proxier.go:381] Received update notice: [] I0125 05:16:09.090332 4678 proxier.go:804] Syncing iptables rules I0125 05:16:09.090347 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:09.112316 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:09.130587 4678 audit.go:125] 2017-01-25T05:16:09.130547036-05:00 AUDIT: id="c875761e-9c79-4a97-9c36-e081fb240f18" ip="172.18.7.222" method="PUT" 
user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:16:09.130662 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:09.139939 4678 audit.go:45] 2017-01-25T05:16:09.139920607-05:00 AUDIT: id="c875761e-9c79-4a97-9c36-e081fb240f18" response="200" I0125 05:16:09.140928 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (10.666222ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:09.141630 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:16:09.143535 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:09.153378 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:09.162562 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:09.171845 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:09.181257 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:09.191830 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:09.203277 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql has no endpoints" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j REJECT COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql cluster IP" -m tcp -p tcp -d 172.30.122.147/32 --dport 5432 -j KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j 
KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 
172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:09.203311 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:09.215117 4678 proxier.go:797] syncProxyRules took 124.78204ms I0125 05:16:09.215140 4678 proxier.go:431] OnServiceUpdate took 124.839468ms for 4 services I0125 05:16:09.260997 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:16:09.261022 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:09.261696 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:16:09.261718 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:09.262099 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Connection:[close] Content-Type:[text/html] Cache-Control:[no-cache]] 0xc4391db280 -1 [] true false map[] 0xc42b0683c0 } I0125 05:16:09.262162 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:16:09.262339 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc439136920 -1 [] true false map[] 0xc4271170e0 } I0125 05:16:09.262387 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:16:09.330343 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:16:09.330365 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:16:09.331033 4678 audit.go:125] 2017-01-25T05:16:09.330994556-05:00 AUDIT: id="53c83a83-eb90-4bef-8064-01590bbb9aee" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:16:09.332070 4678 audit.go:45] 2017-01-25T05:16:09.332055175-05:00 AUDIT: id="53c83a83-eb90-4bef-8064-01590bbb9aee" response="200" I0125 05:16:09.332160 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (3.553646ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:16:09.332467 4678 controller.go:106] Found 0 cronjobs I0125 05:16:09.334535 4678 audit.go:125] 2017-01-25T05:16:09.334506736-05:00 AUDIT: id="565efb5b-382e-4ac1-b17c-b5ff329888bd" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:16:09.335515 4678 audit.go:45] 2017-01-25T05:16:09.335501482-05:00 AUDIT: id="565efb5b-382e-4ac1-b17c-b5ff329888bd" response="200" I0125 05:16:09.335582 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.873131ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:16:09.335806 4678 controller.go:114] Found 0 jobs I0125 05:16:09.335817 4678 controller.go:117] Found 0 groups I0125 05:16:09.411942 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-helper-1-cpv6d I0125 05:16:09.412000 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:16:09.481139 4678 kubelet_pods.go:1029] Generating status for "postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8(daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:09.481139 4678 kubelet_pods.go:1029] Generating status for "postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8(b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:09.481302 4678 status_manager.go:402] Status for pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" is up-to-date; skipping I0125 05:16:09.481382 4678 status_manager.go:402] Status for pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" is up-to-date; skipping I0125 05:16:09.690568 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:09.696283 4678 kubelet_volumes.go:113] Orphaned pod "b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094" found, removing I0125 05:16:09.696960 4678 kubelet_volumes.go:104] Orphaned pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" found, but volumes are not cleaned up I0125 05:16:09.967839 4678 audit.go:125] 2017-01-25T05:16:09.967794023-05:00 AUDIT: id="2f7679f9-13ed-48be-bc15-4832995d2ecb" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave" I0125 05:16:09.969278 4678 audit.go:45] 2017-01-25T05:16:09.969263505-05:00 AUDIT: id="2f7679f9-13ed-48be-bc15-4832995d2ecb" response="200" I0125 05:16:09.969501 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave: (1.934722ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:09.970632 4678 audit.go:125] 2017-01-25T05:16:09.970601568-05:00 AUDIT: 
id="d86a3d41-6fc3-46c4-b4bd-72a101f3775f" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-slave" I0125 05:16:09.971860 4678 audit.go:45] 2017-01-25T05:16:09.971845491-05:00 AUDIT: id="d86a3d41-6fc3-46c4-b4bd-72a101f3775f" response="200" I0125 05:16:09.972193 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers?labelSelector=openshift.io%2Fdeployment-config.name%3Dpostgresql-slave: (1.75807ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:09.973037 4678 audit.go:125] 2017-01-25T05:16:09.973009627-05:00 AUDIT: id="d4f02878-df28-49b1-a721-263b2104357e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:16:09.973856 4678 audit.go:45] 2017-01-25T05:16:09.973844213-05:00 AUDIT: id="d4f02878-df28-49b1-a721-263b2104357e" response="200" I0125 05:16:09.974144 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (1.282553ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:09.974970 4678 audit.go:125] 2017-01-25T05:16:09.974948098-05:00 AUDIT: id="74a8c812-e926-49c7-9537-c92703abd2c5" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:16:09.975799 4678 audit.go:45] 2017-01-25T05:16:09.975788636-05:00 AUDIT: id="74a8c812-e926-49c7-9537-c92703abd2c5" response="200" I0125 05:16:09.976061 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (1.260766ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:09.976851 4678 audit.go:125] 2017-01-25T05:16:09.976825783-05:00 AUDIT: id="ab9777f8-4497-4d5c-a88d-f7a12550dd2e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:16:09.977657 4678 audit.go:45] 2017-01-25T05:16:09.977647113-05:00 AUDIT: id="ab9777f8-4497-4d5c-a88d-f7a12550dd2e" response="200" I0125 05:16:09.977931 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (1.269394ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:09.978990 4678 audit.go:125] 2017-01-25T05:16:09.978964924-05:00 AUDIT: id="18c2ee92-36ee-4d44-87ba-679195a556e1" ip="172.18.7.222" method="PUT" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:16:09.981411 4678 audit.go:45] 2017-01-25T05:16:09.981396139-05:00 AUDIT: id="18c2ee92-36ee-4d44-87ba-679195a556e1" 
response="200" I0125 05:16:09.981714 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (2.914681ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:09.982346 4678 replication_controller.go:322] Observed updated replication controller postgresql-slave-1. Desired pod count change: 0->0 I0125 05:16:09.982419 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (34.115µs) I0125 05:16:09.982657 4678 factory.go:154] Replication controller "postgresql-slave-1" updated. I0125 05:16:09.983550 4678 audit.go:125] 2017-01-25T05:16:09.983526226-05:00 AUDIT: id="042b848e-f4c8-48b4-ace2-40968457f4f0" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:16:09.984333 4678 audit.go:45] 2017-01-25T05:16:09.984323035-05:00 AUDIT: id="042b848e-f4c8-48b4-ace2-40968457f4f0" response="200" I0125 05:16:09.984593 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (1.208177ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:09.985467 4678 audit.go:125] 2017-01-25T05:16:09.985441174-05:00 AUDIT: id="658ffdf2-abef-473e-b94d-16068222d465" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1" I0125 05:16:09.988478 4678 audit.go:45] 2017-01-25T05:16:09.988463202-05:00 AUDIT: id="658ffdf2-abef-473e-b94d-16068222d465" response="200" I0125 05:16:09.988533 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers/postgresql-slave-1: (3.264071ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:09.989169 4678 factory.go:181] Replication controller "postgresql-slave-1" deleted. 
I0125 05:16:09.989332 4678 replication_controller.go:660] Replication Controller has been deleted extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1 I0125 05:16:09.989351 4678 replication_controller.go:647] Finished syncing controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" (28.097µs) I0125 05:16:09.989756 4678 audit.go:125] 2017-01-25T05:16:09.989727888-05:00 AUDIT: id="0466c32a-99f9-48d3-957a-58ca7a48f641" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=openshift.io%2Fdeployer-pod-for.name%3Dpostgresql-slave-1" I0125 05:16:09.990815 4678 audit.go:45] 2017-01-25T05:16:09.990803516-05:00 AUDIT: id="0466c32a-99f9-48d3-957a-58ca7a48f641" response="200" I0125 05:16:09.991133 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods?labelSelector=openshift.io%2Fdeployer-pod-for.name%3Dpostgresql-slave-1: (1.608063ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:09.992146 4678 audit.go:125] 2017-01-25T05:16:09.992122327-05:00 AUDIT: id="1cb3429d-7a80-40af-a7b9-498e49a78972" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy" I0125 05:16:09.994346 4678 config.go:281] Setting pods for source api I0125 05:16:09.994672 4678 replica_set.go:320] Pod postgresql-slave-1-deploy updated, objectMeta {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11131 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11256 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp:2017-01-25 05:16:09.993099681 -0500 EST DeletionGracePeriodSeconds:0xc43352c388 Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
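The replica_set.go entry above shows the objectMeta transition that drives this: the pod's DeletionTimestamp and DeletionGracePeriodSeconds go from empty to set, and the controller reports the pod "deleted through ... updatePod". A small sketch of that handler pattern, using today's k8s.io/api types and invented names (onPodUpdate, the demo namespace), not the vendored 1.5 code itself:

```go
// Sketch: an Update event whose new object carries metadata.deletionTimestamp
// is handled as the deletion signal, instead of waiting for the watch DELETE.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func onPodUpdate(oldPod, newPod *corev1.Pod, deletePod func(*corev1.Pod)) {
	if newPod.DeletionTimestamp != nil && oldPod.DeletionTimestamp == nil {
		fmt.Printf("pod %s/%s deleted through updatePod, timestamp %v\n",
			newPod.Namespace, newPod.Name, newPod.DeletionTimestamp)
		deletePod(newPod)
		return
	}
	// Otherwise fall through to ordinary update handling.
}

func main() {
	old := &corev1.Pod{ObjectMeta: metav1.ObjectMeta{Namespace: "demo", Name: "postgresql-slave-1-deploy"}}
	updated := old.DeepCopy()
	now := metav1.Now()
	updated.DeletionTimestamp = &now

	onPodUpdate(old, updated, func(p *corev1.Pod) {
		fmt.Println("handing", p.Name, "to the delete path")
	})
}
```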
I0125 05:16:09.994736 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).updatePod, timestamp 2017-01-25 05:16:09.993099681 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-slave-1-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy", UID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11256", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935916, nsec:511595799, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc4344d0340), DeletionGracePeriodSeconds:(*int64)(0xc43352c388), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-slave-1"}, Annotations:map[string]string{"openshift.io/deployment.name":"postgresql-slave-1", "openshift.io/scc":"restricted"}, OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc4325e6780), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN 
CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-slave-1", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc4325e6840), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc43352c430), ActiveDeadlineSeconds:(*int64)(0xc43352c438), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc4365a8e40), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Succeeded", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935916, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935981, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935916, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.2", StartTime:(*unversioned.Time)(0xc4344d0580), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"deployment", 
State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc43aa909a0)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"openshift/origin-deployer:86a9783", ImageID:"docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8", ContainerID:"docker://269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d"}}}}. I0125 05:16:09.995014 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-deploy, ReplicaSet controller will avoid syncing I0125 05:16:09.995045 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-deploy, job controller will avoid syncing I0125 05:16:09.995062 4678 daemoncontroller.go:332] Pod postgresql-slave-1-deploy updated. I0125 05:16:09.995063 4678 kubelet.go:1797] SyncLoop (DELETE, "api"): "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:09.995086 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-deploy, daemon set controller will avoid syncing I0125 05:16:09.995104 4678 disruption.go:326] updatePod called on pod "postgresql-slave-1-deploy" I0125 05:16:09.995118 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-deploy, PodDisruptionBudget controller will avoid syncing. I0125 05:16:09.995123 4678 disruption.go:329] No matching pdb for pod "postgresql-slave-1-deploy" I0125 05:16:09.995194 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-deploy, StatefulSet controller will avoid syncing I0125 05:16:09.995235 4678 factory.go:211] Replication controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" has been deleted I0125 05:16:09.995275 4678 replication_controller.go:378] Pod postgresql-slave-1-deploy updated, objectMeta {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11131 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp: DeletionGracePeriodSeconds: Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:} -> {Name:postgresql-slave-1-deploy GenerateName: Namespace:extended-test-postgresql-replication-1-34bbd-xd4g8 SelfLink:/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy UID:b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094 ResourceVersion:11256 Generation:0 CreationTimestamp:2017-01-25 05:11:56.511595799 -0500 EST DeletionTimestamp:2017-01-25 05:16:09.993099681 -0500 EST DeletionGracePeriodSeconds:0xc43352c388 Labels:map[openshift.io/deployer-pod-for.name:postgresql-slave-1] Annotations:map[openshift.io/deployment.name:postgresql-slave-1 openshift.io/scc:restricted] OwnerReferences:[] Finalizers:[] ClusterName:}. 
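Note how the same pod update fans out above: the ReplicaSet, job, daemon set, disruption, and StatefulSet controllers each get the event and each independently decides it has nothing to sync. A rough sketch of that fan-out with the current client-go shared-informer API (the vendored 1.5 code wires its handlers differently; resync interval and kubeconfig path here are arbitrary):

```go
// Sketch: several "controllers" register handlers on one shared pod informer,
// so every pod update is delivered to all of them.
package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	factory := informers.NewSharedInformerFactory(cs, 30*time.Second)
	pods := factory.Core().V1().Pods().Informer()

	for _, name := range []string{"replicaset", "job", "daemonset", "disruption", "statefulset"} {
		controller := name
		pods.AddEventHandler(cache.ResourceEventHandlerFuncs{
			UpdateFunc: func(_, newObj interface{}) {
				pod := newObj.(*corev1.Pod)
				fmt.Printf("%s controller: pod %s/%s updated\n", controller, pod.Namespace, pod.Name)
			},
		})
	}

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)
	factory.WaitForCacheSync(stop)
	select {} // block; handlers fire as watch events arrive
}
```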
I0125 05:16:09.995327 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).updatePod, timestamp 2017-01-25 05:16:09.993099681 -0500 EST, labels map[openshift.io/deployer-pod-for.name:postgresql-slave-1]. I0125 05:16:09.995363 4678 replication_controller.go:255] No controllers found for pod postgresql-slave-1-deploy, replication manager will avoid syncing I0125 05:16:09.995743 4678 audit.go:125] 2017-01-25T05:16:09.995710563-05:00 AUDIT: id="5bb03717-cc87-4b77-b756-161a4a5895a1" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy" I0125 05:16:09.996813 4678 audit.go:45] 2017-01-25T05:16:09.996798386-05:00 AUDIT: id="1cb3429d-7a80-40af-a7b9-498e49a78972" response="200" I0125 05:16:09.996977 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy: (5.012326ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:09.997142 4678 replication_controller.go:441] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.(*ReplicationManager).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replication.deletePod)-fm, timestamp 2017-01-25 05:16:09.993099681 -0500 EST, labels map[openshift.io/deployer-pod-for.name:postgresql-slave-1]. I0125 05:16:09.997188 4678 replication_controller.go:255] No controllers found for pod postgresql-slave-1-deploy, replication manager will avoid syncing I0125 05:16:09.997225 4678 audit.go:45] 2017-01-25T05:16:09.99719493-05:00 AUDIT: id="5bb03717-cc87-4b77-b756-161a4a5895a1" response="200" I0125 05:16:09.997294 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy: (1.811791ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:09.997218 4678 replica_set.go:382] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.(*ReplicaSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/replicaset.deletePod)-fm, timestamp 2017-01-25 05:16:09.993099681 -0500 EST: &api.Pod{TypeMeta:unversioned.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:api.ObjectMeta{Name:"postgresql-slave-1-deploy", GenerateName:"", Namespace:"extended-test-postgresql-replication-1-34bbd-xd4g8", SelfLink:"/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy", UID:"b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094", ResourceVersion:"11257", Generation:0, CreationTimestamp:unversioned.Time{Time:time.Time{sec:63620935916, nsec:511595799, loc:(*time.Location)(0xa2479e0)}}, DeletionTimestamp:(*unversioned.Time)(0xc42c7c3440), DeletionGracePeriodSeconds:(*int64)(0xc4344f41d8), Labels:map[string]string{"openshift.io/deployer-pod-for.name":"postgresql-slave-1"}, Annotations:map[string]string{"openshift.io/deployment.name":"postgresql-slave-1", "openshift.io/scc":"restricted"}, 
OwnerReferences:[]api.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:api.PodSpec{Volumes:[]api.Volume{api.Volume{Name:"deployer-token-r7jj8", VolumeSource:api.VolumeSource{HostPath:(*api.HostPathVolumeSource)(nil), EmptyDir:(*api.EmptyDirVolumeSource)(nil), GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)(nil), AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)(nil), GitRepo:(*api.GitRepoVolumeSource)(nil), Secret:(*api.SecretVolumeSource)(0xc427d3fbc0), NFS:(*api.NFSVolumeSource)(nil), ISCSI:(*api.ISCSIVolumeSource)(nil), Glusterfs:(*api.GlusterfsVolumeSource)(nil), PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)(nil), RBD:(*api.RBDVolumeSource)(nil), Quobyte:(*api.QuobyteVolumeSource)(nil), FlexVolume:(*api.FlexVolumeSource)(nil), Cinder:(*api.CinderVolumeSource)(nil), CephFS:(*api.CephFSVolumeSource)(nil), Flocker:(*api.FlockerVolumeSource)(nil), DownwardAPI:(*api.DownwardAPIVolumeSource)(nil), FC:(*api.FCVolumeSource)(nil), AzureFile:(*api.AzureFileVolumeSource)(nil), ConfigMap:(*api.ConfigMapVolumeSource)(nil), VsphereVolume:(*api.VsphereVirtualDiskVolumeSource)(nil), AzureDisk:(*api.AzureDiskVolumeSource)(nil), PhotonPersistentDisk:(*api.PhotonPersistentDiskVolumeSource)(nil)}}}, InitContainers:[]api.Container(nil), Containers:[]api.Container{api.Container{Name:"deployment", Image:"openshift/origin-deployer:86a9783", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]api.ContainerPort(nil), Env:[]api.EnvVar{api.EnvVar{Name:"KUBERNETES_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_MASTER", Value:"https://172.18.7.222:8443", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"BEARER_TOKEN_FILE", Value:"/var/run/secrets/kubernetes.io/serviceaccount/token", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_CA_DATA", Value:"-----BEGIN CERTIFICATE-----\nMIIC6jCCAdKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAmMSQwIgYDVQQDDBtvcGVu\nc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkwHhcNMTcwMTI1MDgzOTE4WhcNMjIwMTI0\nMDgzOTE5WjAmMSQwIgYDVQQDDBtvcGVuc2hpZnQtc2lnbmVyQDE0ODUzMzM1NTkw\nggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCoKCT9/cxH+wzp19ADAHdH\n6nHsY3aU4lwz6xeZwFpHNa7c2UWvqF4LKhX5zcrlFqGlKc9IrS9+mAeH6BLsfMbp\nMBWv0QQdR8dDwEP2RooeJi+NUNiFfLfn+3aIoMGnbpS1wrreo/+7cl6KbWNmvhrQ\nxPnoEVS5cdAVZSwAVIfUPYXxqVDXaezVP/MqgWtqQVwrozY4OVAvvOZ1dKhKeRWq\nW/7V3Jds+NhWeSWpVxQ75XhRQSQWeo333tLDPKlcCvarxhdaP6DjCFRrtk1ymEk4\nLsAE2xJLQKBSxeSDFEyFhpOBOw5H5VbBQ/P1e8+ujz7nhapKjj5PZTpfomiKVX5t\nAgMBAAGjIzAhMA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG\nSIb3DQEBCwUAA4IBAQCPn5PEHslWclypC5d3wtshIFKWYAAZhonTitTlnKk/MqJ+\nTBq7cHfP9o8lltvm2kIC+vkSwrsdPo3v1mVAbUF0E48HqLLJKyn/u05h6cwvaet+\nOrepDY+MDNt5v4hozZxssPQfiQLn4G1CiAwkTQnLlRIThe8cYWjPd3WYpRD/VOHW\nJDEnWeRzDqjeJajeYF3Oxjm2e75IjvtZyxUTHVtQ45bT6SPYLInhJ59CCXvmnJBZ\n4FGhkDnOK2oqgBcVVgLbwTr97JPiv72+EiZSqSKdjlFIN+fMHbZWdfdzOJm4TJO5\nWymKLIBJMroc+Dwv1gAGXPKFnzNjvjjg7tMeT83M\n-----END CERTIFICATE-----\n", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAME", Value:"postgresql-slave-1", ValueFrom:(*api.EnvVarSource)(nil)}, api.EnvVar{Name:"OPENSHIFT_DEPLOYMENT_NAMESPACE", Value:"extended-test-postgresql-replication-1-34bbd-xd4g8", ValueFrom:(*api.EnvVarSource)(nil)}}, Resources:api.ResourceRequirements{Limits:api.ResourceList(nil), Requests:api.ResourceList(nil)}, VolumeMounts:[]api.VolumeMount{api.VolumeMount{Name:"deployer-token-r7jj8", ReadOnly:true, MountPath:"/var/run/secrets/kubernetes.io/serviceaccount", SubPath:""}}, 
LivenessProbe:(*api.Probe)(nil), ReadinessProbe:(*api.Probe)(nil), Lifecycle:(*api.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", ImagePullPolicy:"IfNotPresent", SecurityContext:(*api.SecurityContext)(0xc427d3fe00), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Never", TerminationGracePeriodSeconds:(*int64)(0xc4344f42b0), ActiveDeadlineSeconds:(*int64)(0xc4344f42b8), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"deployer", NodeName:"172.18.7.222", SecurityContext:(*api.PodSecurityContext)(0xc4365999c0), ImagePullSecrets:[]api.LocalObjectReference{api.LocalObjectReference{Name:"deployer-dockercfg-4rhpp"}}, Hostname:"", Subdomain:""}, Status:api.PodStatus{Phase:"Succeeded", Conditions:[]api.PodCondition{api.PodCondition{Type:"Initialized", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935916, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"Ready", Status:"False", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935981, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"PodCompleted", Message:""}, api.PodCondition{Type:"PodScheduled", Status:"True", LastProbeTime:unversioned.Time{Time:time.Time{sec:0, nsec:0, loc:(*time.Location)(nil)}}, LastTransitionTime:unversioned.Time{Time:time.Time{sec:63620935916, nsec:0, loc:(*time.Location)(0xa2479e0)}}, Reason:"", Message:""}}, Message:"", Reason:"", HostIP:"172.18.7.222", PodIP:"172.17.0.2", StartTime:(*unversioned.Time)(0xc42c7c37a0), InitContainerStatuses:[]api.ContainerStatus(nil), ContainerStatuses:[]api.ContainerStatus{api.ContainerStatus{Name:"deployment", State:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(0xc437762af0)}, LastTerminationState:api.ContainerState{Waiting:(*api.ContainerStateWaiting)(nil), Running:(*api.ContainerStateRunning)(nil), Terminated:(*api.ContainerStateTerminated)(nil)}, Ready:false, RestartCount:0, Image:"openshift/origin-deployer:86a9783", ImageID:"docker://sha256:395cb82ec0f527340fbfe7f182d421596b4115732333d940b83245bedbfd12d8", ContainerID:"docker://269d8959d1b2d841269f6ec26993101591e2e0cd5f3c5678fd7a6654b6c5c82d"}}}}. I0125 05:16:09.997494 4678 replica_set.go:196] No ReplicaSets found for pod postgresql-slave-1-deploy, ReplicaSet controller will avoid syncing I0125 05:16:09.997514 4678 jobcontroller.go:141] No jobs found for pod postgresql-slave-1-deploy, job controller will avoid syncing I0125 05:16:09.997524 4678 daemoncontroller.go:367] Pod postgresql-slave-1-deploy deleted. I0125 05:16:09.997545 4678 daemoncontroller.go:261] No daemon sets found for pod postgresql-slave-1-deploy, daemon set controller will avoid syncing I0125 05:16:09.997554 4678 disruption.go:355] deletePod called on pod "postgresql-slave-1-deploy" I0125 05:16:09.997567 4678 disruption.go:389] No PodDisruptionBudgets found for pod postgresql-slave-1-deploy, PodDisruptionBudget controller will avoid syncing. 
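The repeated "No <kind> found for pod postgresql-slave-1-deploy, ... will avoid syncing" lines come from each controller matching the pod's labels (here only openshift.io/deployer-pod-for.name) against its objects' selectors and finding no owner. A small illustration of that check with k8s.io/apimachinery's label selectors; the selector values are hypothetical, only the pod label is taken from this log:

```go
// Sketch: match a pod's labels against candidate owners' selectors and skip
// syncing when nothing matches.
package main

import (
	"fmt"

	"k8s.io/apimachinery/pkg/labels"
)

func main() {
	// Label carried by the deployer pod in this log.
	podLabels := labels.Set{"openshift.io/deployer-pod-for.name": "postgresql-slave-1"}

	// Hypothetical selectors other controllers' objects might use.
	selectors := map[string]labels.Selector{
		"replicaset postgresql-slave": labels.SelectorFromSet(labels.Set{"deploymentconfig": "postgresql-slave"}),
		"job db-backup":               labels.SelectorFromSet(labels.Set{"job-name": "db-backup"}),
	}

	matched := false
	for name, sel := range selectors {
		if sel.Matches(podLabels) {
			fmt.Println("pod is owned by", name)
			matched = true
		}
	}
	if !matched {
		fmt.Println("No controllers found for pod, will avoid syncing")
	}
}
```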
I0125 05:16:09.997573 4678 disruption.go:358] No matching pdb for pod "postgresql-slave-1-deploy" I0125 05:16:09.997605 4678 pet_set.go:210] Pod extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy deleted through github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.(*StatefulSetController).(github.com/openshift/origin/vendor/k8s.io/kubernetes/pkg/controller/petset.deletePod)-fm. I0125 05:16:09.997617 4678 pet_set.go:239] No StatefulSets found for pod postgresql-slave-1-deploy, StatefulSet controller will avoid syncing I0125 05:16:09.997640 4678 factory.go:211] Replication controller "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1" has been deleted I0125 05:16:09.997794 4678 config.go:281] Setting pods for source api I0125 05:16:09.998339 4678 kubelet.go:1791] SyncLoop (REMOVE, "api"): "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:09.998348 4678 audit.go:125] 2017-01-25T05:16:09.998316219-05:00 AUDIT: id="cd093521-8393-40e6-968d-15b06cc1a4ed" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy/status" I0125 05:16:09.998375 4678 kubelet.go:1976] Failed to delete pod "postgresql-slave-1-deploy_extended-test-postgresql-replication-1-34bbd-xd4g8(b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094)", err: pod not found I0125 05:16:09.999282 4678 audit.go:125] 2017-01-25T05:16:09.999246844-05:00 AUDIT: id="08741ac7-3dc3-421a-9f52-674c937e8511" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave" I0125 05:16:09.999424 4678 audit.go:45] 2017-01-25T05:16:09.999413739-05:00 AUDIT: id="cd093521-8393-40e6-968d-15b06cc1a4ed" response="409" I0125 05:16:09.999471 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods/postgresql-slave-1-deploy/status: (1.404319ms) 409 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] W0125 05:16:09.999708 4678 status_manager.go:451] Failed to update status for pod "_()": Operation cannot be fulfilled on pods "postgresql-slave-1-deploy": StorageError: invalid object, Code: 4, Key: kubernetes.io/pods/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave-1-deploy, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094, UID in object meta: I0125 05:16:10.001932 4678 audit.go:45] 2017-01-25T05:16:10.00191821-05:00 AUDIT: id="08741ac7-3dc3-421a-9f52-674c937e8511" response="200" I0125 05:16:10.001981 4678 panics.go:76] DELETE /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs/postgresql-slave: (2.98614ms) 200 [[oc/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:41076] I0125 05:16:10.002321 4678 factory.go:140] Deleting deployment config "postgresql-slave" I0125 05:16:10.002345 4678 factory.go:265] Deployment config "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" has been deleted I0125 05:16:10.002774 4678 audit.go:125] 2017-01-25T05:16:10.00275183-05:00 AUDIT: id="97a82beb-c5d7-45ea-ad5b-629fd6680922" ip="172.18.7.222" method="GET" 
user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deployments" I0125 05:16:10.003654 4678 audit.go:45] 2017-01-25T05:16:10.00364368-05:00 AUDIT: id="97a82beb-c5d7-45ea-ad5b-629fd6680922" response="200" I0125 05:16:10.003740 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deployments: (1.159658ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.004385 4678 audit.go:125] 2017-01-25T05:16:10.004361546-05:00 AUDIT: id="3c431c15-2dd2-49a4-ac13-1b95a98662c0" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers" I0125 05:16:10.005163 4678 audit.go:45] 2017-01-25T05:16:10.005153326-05:00 AUDIT: id="3c431c15-2dd2-49a4-ac13-1b95a98662c0" response="200" I0125 05:16:10.005245 4678 panics.go:76] GET /apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers: (1.062811ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.005887 4678 audit.go:125] 2017-01-25T05:16:10.005862951-05:00 AUDIT: id="6efa7353-ac90-41c5-b070-3c613efe93a1" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:16:10.006728 4678 audit.go:45] 2017-01-25T05:16:10.006718498-05:00 AUDIT: id="6efa7353-ac90-41c5-b070-3c613efe93a1" response="200" I0125 05:16:10.006797 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (1.123558ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.007496 4678 audit.go:125] 2017-01-25T05:16:10.007469415-05:00 AUDIT: id="50097925-c57e-4b2a-8086-47a9c446104c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/routes" I0125 05:16:10.008382 4678 audit.go:45] 2017-01-25T05:16:10.0083727-05:00 AUDIT: id="50097925-c57e-4b2a-8086-47a9c446104c" response="200" I0125 05:16:10.008433 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/routes: (1.140873ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.009048 4678 audit.go:125] 2017-01-25T05:16:10.009026066-05:00 AUDIT: id="d1f160c1-ba82-4e43-8c9d-07eef44f34e8" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services" I0125 05:16:10.010014 4678 audit.go:45] 2017-01-25T05:16:10.010002942-05:00 AUDIT: id="d1f160c1-ba82-4e43-8c9d-07eef44f34e8" response="200" I0125 05:16:10.010146 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services: (1.300777ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.011389 4678 audit.go:125] 2017-01-25T05:16:10.011361404-05:00 AUDIT: 
id="5436318c-786b-4f4c-ba8f-d799bbdc741c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-helper" I0125 05:16:10.012189 4678 audit.go:45] 2017-01-25T05:16:10.012179301-05:00 AUDIT: id="5436318c-786b-4f4c-ba8f-d799bbdc741c" response="200" I0125 05:16:10.012252 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-helper: (1.052359ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.013062 4678 audit.go:125] 2017-01-25T05:16:10.01303814-05:00 AUDIT: id="d42103c6-a6d7-4e0b-9af7-75e6075b6da0" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-helper" I0125 05:16:10.017369 4678 config.go:208] Calling handler.OnServiceUpdate() I0125 05:16:10.017401 4678 proxier.go:381] Received update notice: [] I0125 05:16:10.017440 4678 proxier.go:509] Removing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper:postgresql" I0125 05:16:10.017474 4678 proxier.go:804] Syncing iptables rules I0125 05:16:10.017490 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:10.032959 4678 audit.go:125] 2017-01-25T05:16:10.032910098-05:00 AUDIT: id="42f35367-201f-4d9b-904a-fa0c55fbcc08" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper" I0125 05:16:10.033797 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:16:10.035662 4678 audit.go:45] 2017-01-25T05:16:10.035631163-05:00 AUDIT: id="42f35367-201f-4d9b-904a-fa0c55fbcc08" response="404" I0125 05:16:10.035753 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-helper: (18.347825ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:10.036140 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-helper" endpoints. 
(19.175959ms) I0125 05:16:10.037041 4678 rest.go:568] Service type: ClusterIP does not need health check node port I0125 05:16:10.037095 4678 audit.go:45] 2017-01-25T05:16:10.037082777-05:00 AUDIT: id="d42103c6-a6d7-4e0b-9af7-75e6075b6da0" response="200" I0125 05:16:10.037152 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-helper: (24.278724ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.038323 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:10.062844 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.077600 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.089706 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.100254 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:10.109895 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:10.119174 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:10.129447 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:10.140791 4678 audit.go:125] 2017-01-25T05:16:10.14075064-05:00 AUDIT: id="5f5bad5a-6c4a-4fc3-b42f-4ae65caea3b8" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-master" I0125 05:16:10.141689 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-T2TLQTY2NRIUTPUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j 
KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 
172.18.7.222:8443 -X KUBE-SVC-T2TLQTY2NRIUTPUX -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:10.141721 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:10.147637 4678 audit.go:45] 2017-01-25T05:16:10.147606686-05:00 AUDIT: id="5f5bad5a-6c4a-4fc3-b42f-4ae65caea3b8" response="200" I0125 05:16:10.147873 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-master: (7.321811ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.155012 4678 proxier.go:797] syncProxyRules took 137.535621ms I0125 05:16:10.155041 4678 proxier.go:431] OnServiceUpdate took 137.626787ms for 3 services I0125 05:16:10.155113 4678 proxier.go:804] Syncing iptables rules I0125 05:16:10.155122 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:10.159998 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:10.160063 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:10.160076 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:10.160085 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:10.160094 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:16:10.160104 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:10.160111 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:10.165137 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:10.174715 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.184543 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.203242 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.217611 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:10.228554 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:10.239628 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:10.250721 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:10.263387 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] 
:KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment 
--comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:10.263422 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:10.276413 4678 proxier.go:797] syncProxyRules took 121.295018ms I0125 05:16:10.276437 4678 proxier.go:566] OnEndpointsUpdate took 242.551988ms for 5 endpoints I0125 05:16:10.276477 4678 proxier.go:381] Received update notice: [] I0125 05:16:10.276508 4678 proxier.go:804] Syncing iptables rules I0125 05:16:10.276516 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:10.287965 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:10.298320 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.312596 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.333930 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.349533 4678 audit.go:125] 2017-01-25T05:16:10.349446649-05:00 AUDIT: id="f8273da7-a53b-4bfd-9155-8ca8269a7a68" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-master" I0125 05:16:10.357480 4678 config.go:208] Calling handler.OnServiceUpdate() I0125 05:16:10.357510 4678 proxier.go:381] Received update notice: [] I0125 05:16:10.363238 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:10.364313 4678 audit.go:125] 2017-01-25T05:16:10.364253875-05:00 AUDIT: id="58cbb11e-3827-4784-9893-bd7d49b1f5a1" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master" I0125 05:16:10.393713 4678 rest.go:568] Service type: ClusterIP does not need health check node port I0125 05:16:10.393789 4678 audit.go:45] 2017-01-25T05:16:10.393769015-05:00 AUDIT: id="f8273da7-a53b-4bfd-9155-8ca8269a7a68" 
response="200" I0125 05:16:10.393879 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-master: (44.872266ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.395116 4678 store.go:208] deletion of kubernetes.io/services/endpoints/extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master failed because of a conflict, going to retry I0125 05:16:10.395171 4678 audit.go:45] 2017-01-25T05:16:10.395159076-05:00 AUDIT: id="58cbb11e-3827-4784-9893-bd7d49b1f5a1" response="404" I0125 05:16:10.395245 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-master: (35.779233ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:10.395627 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-master" endpoints. (36.633113ms) I0125 05:16:10.400674 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:10.430263 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:10.459858 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:10.480527 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment 
"default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:10.480562 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:10.501070 4678 proxier.go:797] syncProxyRules took 224.555544ms I0125 05:16:10.501106 4678 proxier.go:431] OnServiceUpdate took 
224.616655ms for 3 services I0125 05:16:10.501139 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:16:10.501253 4678 proxier.go:804] Syncing iptables rules I0125 05:16:10.501263 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:10.509664 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:10.509752 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:10.509764 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:10.509772 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:10.509780 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:10.509787 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:16:10.509796 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:10.519084 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:10.532451 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.541048 4678 audit.go:125] 2017-01-25T05:16:10.540964311-05:00 AUDIT: id="344738fc-e59f-47cb-8162-ef42f44e9874" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-slave" I0125 05:16:10.542797 4678 audit.go:45] 2017-01-25T05:16:10.542772923-05:00 AUDIT: id="344738fc-e59f-47cb-8162-ef42f44e9874" response="200" I0125 05:16:10.542951 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-slave: (2.330957ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.544632 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.555938 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.565967 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:10.576516 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:10.589865 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:10.602813 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:10.614495 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] 
:KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 
172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:10.614530 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:10.619350 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/host-path/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" (spec.Name: "postgresql-data") from pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:16:10.619519 4678 reconciler.go:189] UnmountVolume operation started for volume "kubernetes.io/secret/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (spec.Name: "default-token-0g2nw") from pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). I0125 05:16:10.619656 4678 util.go:340] Tearing down volume default-token-0g2nw for pod daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:16:10.619753 4678 mount_linux.go:147] Unmounting /mnt/openshift-xfs-vol-dir/pods/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/default-token-0g2nw I0125 05:16:10.633367 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-default-token-0g2nw" (OuterVolumeSpecName: "default-token-0g2nw") pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "default-token-0g2nw". PluginName "kubernetes.io/secret", VolumeGidValue "" I0125 05:16:10.633637 4678 operation_executor.go:992] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094-pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" (OuterVolumeSpecName: "postgresql-data") pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" (UID: "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094"). InnerVolumeSpecName "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" I0125 05:16:10.636177 4678 proxier.go:797] syncProxyRules took 134.920854ms I0125 05:16:10.636226 4678 proxier.go:566] OnEndpointsUpdate took 135.033903ms for 4 endpoints I0125 05:16:10.636261 4678 proxier.go:381] Received update notice: [] I0125 05:16:10.636302 4678 proxier.go:804] Syncing iptables rules I0125 05:16:10.636315 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:10.652840 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:10.669257 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.683744 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.694974 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.706443 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:10.718216 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:10.728149 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:10.737810 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:10.743351 4678 audit.go:125] 2017-01-25T05:16:10.743288514-05:00 AUDIT: id="f9218547-ac98-4468-9bf0-bab8820124e2" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-slave" I0125 05:16:10.751954 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment 
--comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service 
nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:10.751986 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:10.762786 4678 rest.go:568] Service type: ClusterIP does not need health check node port I0125 05:16:10.762872 4678 audit.go:45] 2017-01-25T05:16:10.762854672-05:00 AUDIT: id="f9218547-ac98-4468-9bf0-bab8820124e2" response="200" I0125 05:16:10.762964 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services/postgresql-slave: (20.075819ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.763550 4678 audit.go:125] 2017-01-25T05:16:10.763498046-05:00 AUDIT: id="f8e264a3-1d85-4879-8b81-8ea2c97c974c" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave" I0125 05:16:10.764819 4678 audit.go:45] 2017-01-25T05:16:10.764808025-05:00 AUDIT: id="f8e264a3-1d85-4879-8b81-8ea2c97c974c" response="404" I0125 05:16:10.764877 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints/postgresql-slave: (13.40002ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:10.765222 4678 endpoints_controller.go:334] Finished syncing service "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-slave" endpoints. (15.036039ms) I0125 05:16:10.766552 4678 audit.go:125] 2017-01-25T05:16:10.766513772-05:00 AUDIT: id="11ece24a-989a-4f78-8380-724f826d49ae" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/statefulsets" I0125 05:16:10.767593 4678 audit.go:45] 2017-01-25T05:16:10.767578809-05:00 AUDIT: id="11ece24a-989a-4f78-8380-724f826d49ae" response="200" I0125 05:16:10.767674 4678 panics.go:76] GET /apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/statefulsets: (1.429231ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.769282 4678 audit.go:125] 2017-01-25T05:16:10.769259173-05:00 AUDIT: id="07649ccb-aae0-4c16-8f57-59bf85cda0f4" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/batch/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs" I0125 05:16:10.770003 4678 audit.go:45] 2017-01-25T05:16:10.769993041-05:00 AUDIT: id="07649ccb-aae0-4c16-8f57-59bf85cda0f4" response="200" I0125 05:16:10.770057 4678 panics.go:76] GET /apis/batch/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs: (972.73µs) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.770245 4678 proxier.go:797] syncProxyRules took 133.94206ms I0125 05:16:10.770270 4678 proxier.go:431] OnServiceUpdate took 133.994027ms for 3 services I0125 05:16:10.770295 4678 config.go:99] Calling handler.OnEndpointsUpdate() I0125 05:16:10.770413 4678 proxier.go:804] Syncing iptables rules I0125 05:16:10.770426 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 
05:16:10.776869 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:10.776934 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:10.776950 4678 healthcheck.go:86] LB service health check mutation request Service: default/docker-registry - 1 Endpoints [default/docker-registry] I0125 05:16:10.776963 4678 healthcheck.go:86] LB service health check mutation request Service: default/kubernetes - 0 Endpoints [] I0125 05:16:10.776971 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:10.776982 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:10.776990 4678 healthcheck.go:86] LB service health check mutation request Service: default/router - 1 Endpoints [default/router] I0125 05:16:10.777850 4678 audit.go:125] 2017-01-25T05:16:10.777781399-05:00 AUDIT: id="1eb60c1b-a5d5-4ca2-8e57-c9845efbb825" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicasets" I0125 05:16:10.779531 4678 audit.go:45] 2017-01-25T05:16:10.779517576-05:00 AUDIT: id="1eb60c1b-a5d5-4ca2-8e57-c9845efbb825" response="200" I0125 05:16:10.779643 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicasets: (2.200437ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.780514 4678 audit.go:125] 2017-01-25T05:16:10.780484445-05:00 AUDIT: id="f5a3303c-190e-44a8-ac08-c6dd2012c3c5" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:16:10.781786 4678 audit.go:45] 2017-01-25T05:16:10.781773065-05:00 AUDIT: id="f5a3303c-190e-44a8-ac08-c6dd2012c3c5" response="200" I0125 05:16:10.781854 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (1.592397ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41076] I0125 05:16:10.784559 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:10.810385 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.828166 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.844016 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:10.867106 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:10.897688 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:10.926676 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:10.958836 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:10.998409 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] 
:KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A 
KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:10.998449 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:11.039998 4678 proxier.go:797] syncProxyRules took 269.578384ms I0125 05:16:11.040040 4678 proxier.go:566] OnEndpointsUpdate took 269.686389ms for 3 endpoints I0125 05:16:11.040079 4678 proxier.go:381] Received update notice: [] I0125 05:16:11.040138 4678 proxier.go:804] Syncing iptables rules I0125 05:16:11.040153 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:11.066212 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:11.086736 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:11.097902 4678 audit.go:125] 2017-01-25T05:16:11.097860371-05:00 AUDIT: id="69edd687-b9b1-4586-a2b0-3fd177a90f3e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="openshift" uri="/oapi/v1/namespaces/openshift/imagestreams" I0125 05:16:11.102080 4678 audit.go:45] 2017-01-25T05:16:11.102065411-05:00 AUDIT: id="69edd687-b9b1-4586-a2b0-3fd177a90f3e" response="200" I0125 05:16:11.103860 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:11.110878 4678 panics.go:76] GET /oapi/v1/namespaces/openshift/imagestreams: (13.298734ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41098] I0125 05:16:11.118874 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:11.142269 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:11.169526 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting 
rules -j KUBE-POSTROUTING] I0125 05:16:11.194600 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:11.215786 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:11.233986 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment 
"default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:11.234027 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:11.259713 4678 proxier.go:797] syncProxyRules took 219.567943ms I0125 05:16:11.259755 4678 proxier.go:431] OnServiceUpdate took 219.64582ms for 3 services I0125 05:16:11.259810 4678 proxier.go:804] Syncing iptables rules I0125 05:16:11.259826 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:11.283788 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:11.299186 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:11.325267 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:11.348915 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:11.368920 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:11.385291 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:11.408056 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:11.417914 4678 audit.go:125] 2017-01-25T05:16:11.417838184-05:00 AUDIT: id="65c733e9-153e-4d44-b9d9-ce1b0fc6400d" ip="172.18.7.222" method="GET" 
user="extended-test-postgresql-replication-1-34bbd-xd4g8-user" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/imagestreams" I0125 05:16:11.419915 4678 audit.go:45] 2017-01-25T05:16:11.419887387-05:00 AUDIT: id="65c733e9-153e-4d44-b9d9-ce1b0fc6400d" response="200" I0125 05:16:11.420047 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/imagestreams: (13.692517ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41100] I0125 05:16:11.429803 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:11.459855 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m 
comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:11.459897 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:11.488351 4678 proxier.go:797] syncProxyRules took 228.537472ms I0125 05:16:11.488399 4678 proxier.go:431] OnServiceUpdate took 1.130874371s for 3 services I0125 05:16:11.488435 4678 config.go:208] Calling handler.OnServiceUpdate() I0125 05:16:11.488449 4678 proxier.go:381] Received update notice: [] I0125 05:16:11.488491 4678 proxier.go:804] Syncing iptables rules I0125 05:16:11.488504 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:11.507904 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:11.524490 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:11.540491 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:11.559378 
4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:11.579868 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:11.594082 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:11.610660 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:11.626312 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:11.642567 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment 
default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:11.642610 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:11.674909 4678 proxier.go:797] syncProxyRules took 186.409615ms I0125 05:16:11.674945 4678 proxier.go:431] OnServiceUpdate took 186.482565ms for 3 services I0125 05:16:11.684623 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:11.687781 4678 kubelet_volumes.go:113] Orphaned pod "b39c8e4d-e2e6-11e6-a4b0-0e6a5cbf0094" found, removing I0125 05:16:11.688407 4678 kubelet_volumes.go:113] Orphaned pod "daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094" found, removing I0125 05:16:11.778415 4678 audit.go:125] 2017-01-25T05:16:11.778376546-05:00 AUDIT: id="19d7d39a-ad04-4697-8ff3-479a9a0d33d2" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims" I0125 05:16:11.779724 4678 audit.go:45] 2017-01-25T05:16:11.779712377-05:00 AUDIT: id="19d7d39a-ad04-4697-8ff3-479a9a0d33d2" response="200" I0125 05:16:11.779854 4678 
panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims: (1.683806ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41102] I0125 05:16:11.781846 4678 audit.go:125] 2017-01-25T05:16:11.781817928-05:00 AUDIT: id="aed74087-30b4-4c3f-8c17-fe110be38b08" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim" I0125 05:16:11.784264 4678 audit.go:45] 2017-01-25T05:16:11.784250788-05:00 AUDIT: id="aed74087-30b4-4c3f-8c17-fe110be38b08" response="200" I0125 05:16:11.784367 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims/postgresql-data-claim: (2.720124ms) 200 [[oc/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:41102] I0125 05:16:11.784920 4678 pv_controller_base.go:425] claim "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim" deleted I0125 05:16:11.784953 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Bound, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:16:11.784966 4678 pv_controller.go:404] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:16:11.784982 4678 pv_controller.go:413] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim not found I0125 05:16:11.784993 4678 pv_controller.go:441] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" is released and reclaim policy "Retain" will be executed I0125 05:16:11.785003 4678 pv_controller.go:643] updating PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: set phase Released I0125 05:16:11.788937 4678 audit.go:125] 2017-01-25T05:16:11.788905094-05:00 AUDIT: id="cc740d11-2399-407b-bad9-d2cbffd736f2" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:pv-binder-controller" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000/status" I0125 05:16:11.790652 4678 audit.go:45] 2017-01-25T05:16:11.790641631-05:00 AUDIT: id="cc740d11-2399-407b-bad9-d2cbffd736f2" response="200" I0125 05:16:11.790710 4678 panics.go:76] PUT /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000/status: (5.1181ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:pv-binder-controller] 172.18.7.222:50846] I0125 05:16:11.791275 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 11267 I0125 05:16:11.791310 4678 pv_controller.go:379] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: phase: Released, bound to: "extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim (uid: b366f89d-e2e6-11e6-a4b0-0e6a5cbf0094)", boundByController: true I0125 05:16:11.791323 4678 pv_controller.go:404] 
synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: volume is bound to claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim I0125 05:16:11.791339 4678 pv_controller.go:413] synchronizing PersistentVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: claim extended-test-postgresql-replication-1-34bbd-xd4g8/postgresql-data-claim not found I0125 05:16:11.791348 4678 pv_controller.go:925] reclaimVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: policy is Retain, nothing to do I0125 05:16:11.791434 4678 pv_controller_base.go:607] storeObjectUpdate updating volume "/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" with version 11267 I0125 05:16:11.791454 4678 pv_controller.go:672] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" entered phase "Released" I0125 05:16:11.791464 4678 pv_controller.go:925] reclaimVolume[pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000]: policy is Retain, nothing to do I0125 05:16:11.792895 4678 audit.go:125] 2017-01-25T05:16:11.792866694-05:00 AUDIT: id="8dda9348-bf5e-4ed0-86b5-d1ccbd53b1e7" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes" I0125 05:16:11.793785 4678 audit.go:45] 2017-01-25T05:16:11.793775226-05:00 AUDIT: id="8dda9348-bf5e-4ed0-86b5-d1ccbd53b1e7" response="200" I0125 05:16:11.793892 4678 panics.go:76] GET /api/v1/persistentvolumes: (1.258767ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.794780 4678 audit.go:125] 2017-01-25T05:16:11.79475901-05:00 AUDIT: id="fb86cb5a-0362-4185-8669-953be9c38d3b" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:16:11.795614 4678 audit.go:45] 2017-01-25T05:16:11.795604004-05:00 AUDIT: id="fb86cb5a-0362-4185-8669-953be9c38d3b" response="200" I0125 05:16:11.795739 4678 panics.go:76] GET /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (1.128003ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.796477 4678 audit.go:125] 2017-01-25T05:16:11.796455145-05:00 AUDIT: id="2f08b005-52f3-457f-b549-f6c1dd512631" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" I0125 05:16:11.798853 4678 audit.go:45] 2017-01-25T05:16:11.798843417-05:00 AUDIT: id="2f08b005-52f3-457f-b549-f6c1dd512631" response="200" I0125 05:16:11.798931 4678 panics.go:76] DELETE /api/v1/persistentvolumes/pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000: (2.656582ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.799362 4678 pv_controller_base.go:316] volume "pv-extended-test-postgresql-replication-1-34bbd-xd4g8-0000" deleted I0125 05:16:11.810366 4678 audit.go:125] 2017-01-25T05:16:11.810340617-05:00 AUDIT: id="f571a1b9-0d9a-4cb4-b3a8-05d9b895f44c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/events" I0125 05:16:11.811482 4678 audit.go:45] 2017-01-25T05:16:11.81146825-05:00 AUDIT: id="f571a1b9-0d9a-4cb4-b3a8-05d9b895f44c" 
response="200" I0125 05:16:11.811570 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/events: (1.438312ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.817278 4678 audit.go:125] 2017-01-25T05:16:11.817241151-05:00 AUDIT: id="86aeff23-dde6-4d03-8eb6-026afe93854c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:11.818514 4678 audit.go:45] 2017-01-25T05:16:11.818498552-05:00 AUDIT: id="86aeff23-dde6-4d03-8eb6-026afe93854c" response="200" I0125 05:16:11.818770 4678 panics.go:76] GET /api/v1/nodes: (1.7195ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.819448 4678 audit.go:125] 2017-01-25T05:16:11.819428602-05:00 AUDIT: id="809bbec6-2289-4678-b7ed-4ed5eaf9a070" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/pods" I0125 05:16:11.820645 4678 audit.go:45] 2017-01-25T05:16:11.820609191-05:00 AUDIT: id="809bbec6-2289-4678-b7ed-4ed5eaf9a070" response="200" I0125 05:16:11.820932 4678 panics.go:76] GET /api/v1/pods: (1.687952ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.828730 4678 audit.go:125] 2017-01-25T05:16:11.828710789-05:00 AUDIT: id="9c835b80-b4b8-4d16-8a15-7bb36dc30e03" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:11.829813 4678 audit.go:45] 2017-01-25T05:16:11.829763891-05:00 AUDIT: id="9c835b80-b4b8-4d16-8a15-7bb36dc30e03" response="200" I0125 05:16:11.830086 4678 panics.go:76] GET /api/v1/nodes: (1.573323ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.830766 4678 audit.go:125] 2017-01-25T05:16:11.830746957-05:00 AUDIT: id="754eb2b4-88f2-41b6-983b-09d26f8e8328" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222" I0125 05:16:11.831648 4678 audit.go:45] 2017-01-25T05:16:11.831637998-05:00 AUDIT: id="754eb2b4-88f2-41b6-983b-09d26f8e8328" response="200" I0125 05:16:11.831877 4678 panics.go:76] GET /api/v1/nodes/172.18.7.222: (1.313674ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.837431 4678 audit.go:125] 2017-01-25T05:16:11.837382786-05:00 AUDIT: id="64c6235f-08ea-415c-b0cb-00f9ebffce81" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="kube-system" uri="/api/v1/namespaces/kube-system/events?fieldSelector=involvedObject.kind%3DNode%2CinvolvedObject.name%3D172.18.7.222%2CinvolvedObject.namespace%3D%2Csource%3Dkubelet" I0125 05:16:11.838493 4678 audit.go:45] 2017-01-25T05:16:11.838483851-05:00 AUDIT: id="64c6235f-08ea-415c-b0cb-00f9ebffce81" response="200" I0125 05:16:11.838589 4678 panics.go:76] GET /api/v1/namespaces/kube-system/events?fieldSelector=involvedObject.kind%3DNode%2CinvolvedObject.name%3D172.18.7.222%2CinvolvedObject.namespace%3D%2Csource%3Dkubelet: (1.375155ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.839287 4678 audit.go:125] 2017-01-25T05:16:11.839242296-05:00 AUDIT: id="6370bbb7-11ff-4d58-97ff-f893bf3bfe49" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/version" I0125 05:16:11.839426 4678 audit.go:45] 2017-01-25T05:16:11.839413523-05:00 AUDIT: 
id="6370bbb7-11ff-4d58-97ff-f893bf3bfe49" response="200" I0125 05:16:11.839444 4678 panics.go:76] GET /version: (382.611µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.840183 4678 audit.go:125] 2017-01-25T05:16:11.840164368-05:00 AUDIT: id="6281c52d-d33e-48cc-9eca-f2607cce8326" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222:10250/proxy/pods" I0125 05:16:11.854366 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"get", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/pods"} I0125 05:16:11.855105 4678 audit.go:125] 2017-01-25T05:16:11.855072232-05:00 AUDIT: id="b6190b3c-b2df-4b6a-946a-d57b59cf081e" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/oapi/v1/subjectaccessreviews" I0125 05:16:11.855680 4678 audit.go:45] 2017-01-25T05:16:11.85566934-05:00 AUDIT: id="b6190b3c-b2df-4b6a-946a-d57b59cf081e" response="201" I0125 05:16:11.855747 4678 panics.go:76] POST /oapi/v1/subjectaccessreviews: (866.789µs) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50864] I0125 05:16:11.856028 4678 authorizer.go:69] allowed=true, reason=allowed by cluster rule I0125 05:16:11.856453 4678 server.go:744] GET /pods: (2.285916ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:52858] I0125 05:16:11.856549 4678 audit.go:45] 2017-01-25T05:16:11.856533317-05:00 AUDIT: id="6281c52d-d33e-48cc-9eca-f2607cce8326" response="200" I0125 05:16:11.856929 4678 panics.go:76] GET /api/v1/nodes/172.18.7.222:10250/proxy/pods: (16.937853ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.858360 4678 audit.go:125] 2017-01-25T05:16:11.858336843-05:00 AUDIT: id="b975ee1d-a7d1-4696-ad7c-ae1e0cf40d6f" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:11.859392 4678 audit.go:45] 2017-01-25T05:16:11.859378237-05:00 AUDIT: id="b975ee1d-a7d1-4696-ad7c-ae1e0cf40d6f" response="200" I0125 05:16:11.859671 4678 panics.go:76] GET /api/v1/nodes: (1.566408ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.860481 4678 audit.go:125] 2017-01-25T05:16:11.860461787-05:00 AUDIT: id="198fd3c2-8fdd-4b9b-a10a-f0af46fc4eaf" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222" I0125 05:16:11.861405 4678 audit.go:45] 2017-01-25T05:16:11.861395356-05:00 AUDIT: id="198fd3c2-8fdd-4b9b-a10a-f0af46fc4eaf" response="200" I0125 05:16:11.861626 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222: (1.351073ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:11.862221 4678 audit.go:125] 2017-01-25T05:16:11.862202192-05:00 AUDIT: id="5790e82b-baa5-4535-b194-00bc71f2c818" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/proxy/nodes/172.18.7.222:10250/metrics" I0125 05:16:11.865338 4678 proxy.go:187] [219329f45fb85840] Beginning proxy 
/api/v1/proxy/nodes/172.18.7.222:10250/metrics... I0125 05:16:11.865625 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"get", APIVersion:"v1", APIGroup:"", Resource:"nodes/metrics", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/metrics"} I0125 05:16:11.866170 4678 audit.go:125] 2017-01-25T05:16:11.866146219-05:00 AUDIT: id="c5f98c8a-54b7-403f-9a00-df7056534910" ip="172.18.7.222" method="POST" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/oapi/v1/subjectaccessreviews" I0125 05:16:11.866753 4678 audit.go:45] 2017-01-25T05:16:11.866742991-05:00 AUDIT: id="c5f98c8a-54b7-403f-9a00-df7056534910" response="201" I0125 05:16:11.866813 4678 panics.go:76] POST /oapi/v1/subjectaccessreviews: (831.981µs) 201 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50864] I0125 05:16:11.867028 4678 authorizer.go:69] allowed=true, reason=allowed by cluster rule I0125 05:16:11.973888 4678 audit.go:125] 2017-01-25T05:16:11.973845198-05:00 AUDIT: id="b0c1569f-d39a-4aa5-ab9b-ea2def08d064" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:16:11.975162 4678 audit.go:45] 2017-01-25T05:16:11.975148737-05:00 AUDIT: id="b0c1569f-d39a-4aa5-ab9b-ea2def08d064" response="200" I0125 05:16:11.975266 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.706404ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:11.977859 4678 audit.go:45] 2017-01-25T05:16:11.977844895-05:00 AUDIT: id="5790e82b-baa5-4535-b194-00bc71f2c818" response="200" I0125 05:16:11.977914 4678 server.go:744] GET /metrics: (112.41811ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:52858] I0125 05:16:11.978394 4678 proxy.go:189] [219329f45fb85840] Proxy /api/v1/proxy/nodes/172.18.7.222:10250/metrics finished 113.054719ms. 
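The two node_auth.go records above show the authorization step that precedes each kubelet proxy request: the GET for /pods was checked as verb "get" on resource "nodes/proxy", while the GET for /metrics was checked as "nodes/metrics", each via a SubjectAccessReview that the authorizer answered with allowed=true. A minimal sketch of that subpath-to-subresource mapping, reconstructed only from these two log entries; the helper name and every entry other than /metrics and the /pods fallback are assumptions, not the actual node_auth.go code.

package main

import (
	"fmt"
	"strings"
)

// nodeProxyAttributes maps a kubelet subpath to the node subresource that is
// authorized before the request is proxied. The /metrics and /pods behavior is
// taken from the log above; the /stats and /logs entries and the fallback to
// "nodes/proxy" are assumptions about how such a mapper could be extended.
func nodeProxyAttributes(subpath string) (verb, resource string) {
	switch {
	case strings.HasPrefix(subpath, "/metrics"):
		return "get", "nodes/metrics"
	case strings.HasPrefix(subpath, "/stats"):
		return "get", "nodes/stats" // assumed, not shown in this log excerpt
	case strings.HasPrefix(subpath, "/logs"):
		return "get", "nodes/log" // assumed, not shown in this log excerpt
	default:
		return "get", "nodes/proxy" // matches the /pods request above
	}
}

func main() {
	for _, p := range []string{"/pods", "/metrics"} {
		verb, res := nodeProxyAttributes(p)
		fmt.Printf("URL:%q -> Verb:%q Resource:%q\n", p, verb, res)
	}
}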
I0125 05:16:11.978574 4678 panics.go:76] GET /api/v1/proxy/nodes/172.18.7.222:10250/metrics: (116.534202ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:12.017983 4678 audit.go:125] 2017-01-25T05:16:12.017948679-05:00 AUDIT: id="7fa827b6-92d8-4849-85aa-34e49d099afa" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="kube-system" uri="/api/v1/namespaces/kube-system/pods?labelSelector=name%3De2e-image-puller" I0125 05:16:12.019102 4678 audit.go:45] 2017-01-25T05:16:12.019087223-05:00 AUDIT: id="7fa827b6-92d8-4849-85aa-34e49d099afa" response="200" I0125 05:16:12.019176 4678 panics.go:76] GET /api/v1/namespaces/kube-system/pods?labelSelector=name%3De2e-image-puller: (1.44078ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:12.019929 4678 audit.go:125] 2017-01-25T05:16:12.019910592-05:00 AUDIT: id="976ccb85-fe6a-4060-a42f-d946daaeb617" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:12.020867 4678 audit.go:45] 2017-01-25T05:16:12.020854859-05:00 AUDIT: id="976ccb85-fe6a-4060-a42f-d946daaeb617" response="200" I0125 05:16:12.021082 4678 panics.go:76] GET /api/v1/nodes: (1.367153ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:12.022013 4678 audit.go:125] 2017-01-25T05:16:12.021981814-05:00 AUDIT: id="8646864b-886b-4bd4-a339-98fd0899a938" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps" I0125 05:16:12.024000 4678 audit.go:45] 2017-01-25T05:16:12.023990089-05:00 AUDIT: id="8646864b-886b-4bd4-a339-98fd0899a938" response="200" I0125 05:16:12.024038 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps: (2.328589ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:12.024941 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (1.496µs) I0125 05:16:12.025747 4678 audit.go:125] 2017-01-25T05:16:12.025715658-05:00 AUDIT: id="5ee0b3c6-0ae2-4767-b88e-df891f40f20c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/buildconfigs" I0125 05:16:12.025747 4678 audit.go:125] 2017-01-25T05:16:12.025715627-05:00 AUDIT: id="fed02bf7-4fe2-4cdc-91ba-dccf2aa480d8" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps" I0125 05:16:12.026577 4678 audit.go:45] 2017-01-25T05:16:12.026564685-05:00 AUDIT: id="fed02bf7-4fe2-4cdc-91ba-dccf2aa480d8" response="200" I0125 05:16:12.026633 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps: (1.146852ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:12.026819 4678 audit.go:45] 2017-01-25T05:16:12.026805548-05:00 AUDIT: id="5ee0b3c6-0ae2-4767-b88e-df891f40f20c" response="200" I0125 05:16:12.026874 4678 panics.go:76] GET 
/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/buildconfigs: (1.389599ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.027525 4678 audit.go:125] 2017-01-25T05:16:12.027503192-05:00 AUDIT: id="d7f9e547-b701-4b6e-b7f6-af562aa2fea8" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/builds" I0125 05:16:12.028361 4678 audit.go:45] 2017-01-25T05:16:12.0283516-05:00 AUDIT: id="d7f9e547-b701-4b6e-b7f6-af562aa2fea8" response="200" I0125 05:16:12.028403 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/builds: (1.07213ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.028993 4678 audit.go:125] 2017-01-25T05:16:12.028972389-05:00 AUDIT: id="5971ee69-0dd7-448c-9519-1690b2b90b88" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/deploymentconfigs" I0125 05:16:12.029750 4678 audit.go:45] 2017-01-25T05:16:12.029740163-05:00 AUDIT: id="5971ee69-0dd7-448c-9519-1690b2b90b88" response="200" I0125 05:16:12.029800 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/deploymentconfigs: (976.273µs) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.030409 4678 audit.go:125] 2017-01-25T05:16:12.030378668-05:00 AUDIT: id="72fc6100-d72b-4b3f-8e7f-22f211e5dfd6" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/egressnetworkpolicies" I0125 05:16:12.031240 4678 audit.go:45] 2017-01-25T05:16:12.031226597-05:00 AUDIT: id="72fc6100-d72b-4b3f-8e7f-22f211e5dfd6" response="200" I0125 05:16:12.031307 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/egressnetworkpolicies: (1.076632ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.031949 4678 audit.go:125] 2017-01-25T05:16:12.03192721-05:00 AUDIT: id="d715d299-e6d2-4c81-ada8-9aa62ebe7afe" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/imagestreams" I0125 05:16:12.032770 4678 audit.go:45] 2017-01-25T05:16:12.032760942-05:00 AUDIT: id="d715d299-e6d2-4c81-ada8-9aa62ebe7afe" response="200" I0125 05:16:12.032813 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/imagestreams: (1.056357ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.033379 4678 audit.go:125] 2017-01-25T05:16:12.033353686-05:00 AUDIT: id="e1f51fdd-eab9-44e4-8293-0783658a9d9a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/policies" I0125 05:16:12.034134 4678 audit.go:45] 2017-01-25T05:16:12.034124522-05:00 AUDIT: id="e1f51fdd-eab9-44e4-8293-0783658a9d9a" response="200" 
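The audit entries around this point trace the origin-side teardown of the deleted namespace: system:openshift-master lists each per-namespace OpenShift resource type (buildconfigs, builds, deploymentconfigs, egressnetworkpolicies, imagestreams, policies, policybindings, rolebindings, roles, routes, templates), removes policybindings/:default, and finally PUTs the namespace finalize subresource further down in the log. A rough sketch of that enumerate-then-finalize sequence, assuming a master URL, bearer token, and insecure TLS purely for illustration; the resource list is copied from the audit lines, everything else is an assumption rather than the controller's real client code.

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
)

const (
	apiBase = "https://172.18.7.222:8443" // assumed master URL; not stated in this excerpt
	token   = "REPLACE_ME"                // assumed bearer token with admin-equivalent access
	ns      = "extended-test-postgresql-replication-0-bwll6-pnjps"
)

// resources mirrors the per-namespace OpenShift types walked in the audit
// entries above before the namespace is finalized.
var resources = []string{
	"buildconfigs", "builds", "deploymentconfigs", "egressnetworkpolicies",
	"imagestreams", "policies", "policybindings", "rolebindings", "roles",
	"routes", "templates",
}

// do issues a single authenticated request and prints the status code,
// mirroring one audit.go request/response pair.
func do(client *http.Client, method, url string) error {
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+token)
	resp, err := client.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	fmt.Println(method, url, "->", resp.StatusCode)
	return nil
}

func main() {
	// InsecureSkipVerify is only for poking at a throwaway test master.
	client := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}}
	for _, r := range resources {
		// List each resource type in the namespace, as the GET audit lines show.
		_ = do(client, "GET", fmt.Sprintf("%s/oapi/v1/namespaces/%s/%s", apiBase, ns, r))
	}
	// The controller then releases its finalizer via the namespace finalize
	// subresource (the PUT .../finalize entry later in the log); a real call
	// would send the updated Namespace object as the request body.
	_ = do(client, "PUT", fmt.Sprintf("%s/api/v1/namespaces/%s/finalize", apiBase, ns))
}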
I0125 05:16:12.034185 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/policies: (1.004461ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.034789 4678 audit.go:125] 2017-01-25T05:16:12.034767668-05:00 AUDIT: id="065a22f1-3988-4610-b862-49d5abb8a515" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/policybindings" I0125 05:16:12.035664 4678 audit.go:45] 2017-01-25T05:16:12.035654345-05:00 AUDIT: id="065a22f1-3988-4610-b862-49d5abb8a515" response="200" I0125 05:16:12.035734 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/policybindings: (1.13243ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.036389 4678 audit.go:125] 2017-01-25T05:16:12.036367251-05:00 AUDIT: id="e18e4c97-ae4b-44ab-a269-2a8f129a0bd2" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/policybindings/:default" I0125 05:16:12.039017 4678 audit.go:45] 2017-01-25T05:16:12.039003333-05:00 AUDIT: id="e18e4c97-ae4b-44ab-a269-2a8f129a0bd2" response="200" I0125 05:16:12.039060 4678 panics.go:76] DELETE /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/policybindings/:default: (2.871005ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.039642 4678 audit.go:125] 2017-01-25T05:16:12.039618689-05:00 AUDIT: id="a37faebd-7ed2-4ed9-9107-23721863b85b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/rolebindings" I0125 05:16:12.040419 4678 audit.go:45] 2017-01-25T05:16:12.040409259-05:00 AUDIT: id="a37faebd-7ed2-4ed9-9107-23721863b85b" response="200" I0125 05:16:12.040465 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/rolebindings: (1.005887ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.041075 4678 audit.go:125] 2017-01-25T05:16:12.041053781-05:00 AUDIT: id="de7168bb-775d-449e-a7b3-1cdacbfb7e8e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/roles" I0125 05:16:12.041792 4678 audit.go:45] 2017-01-25T05:16:12.0417823-05:00 AUDIT: id="de7168bb-775d-449e-a7b3-1cdacbfb7e8e" response="200" I0125 05:16:12.041838 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/roles: (961.095µs) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.042397 4678 audit.go:125] 2017-01-25T05:16:12.042375599-05:00 AUDIT: id="671cc1a5-732f-4faf-adb8-96eb99eb0adc" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/routes" I0125 05:16:12.043212 4678 audit.go:45] 2017-01-25T05:16:12.043202245-05:00 AUDIT: 
id="671cc1a5-732f-4faf-adb8-96eb99eb0adc" response="200" I0125 05:16:12.043254 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/routes: (1.054996ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.043775 4678 audit.go:125] 2017-01-25T05:16:12.04375341-05:00 AUDIT: id="86f4eeaa-c039-43eb-961d-c5bc51e70814" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/templates" I0125 05:16:12.044508 4678 audit.go:45] 2017-01-25T05:16:12.044498319-05:00 AUDIT: id="86f4eeaa-c039-43eb-961d-c5bc51e70814" response="200" I0125 05:16:12.044552 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/templates: (954.909µs) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:12.045149 4678 audit.go:125] 2017-01-25T05:16:12.04512594-05:00 AUDIT: id="f5d8df4e-5899-4485-a8b3-e117549ff5c2" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/finalize" I0125 05:16:12.046360 4678 audit.go:45] 2017-01-25T05:16:12.046346859-05:00 AUDIT: id="f5d8df4e-5899-4485-a8b3-e117549ff5c2" response="200" I0125 05:16:12.046413 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/finalize: (1.439473ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:12.046763 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (2.209µs) I0125 05:16:12.967147 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:16:12.967172 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:12.967877 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:16:12.967894 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:12.969113 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:16:12 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42d1d9e00 0 [] true false map[] 0xc42a7aaa50 } I0125 05:16:12.969161 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:16:12.969268 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Content-Length:[0] Content-Type:[text/plain; charset=utf-8] Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:16:12 GMT]] 0xc42d1d9f20 0 [] true false map[] 0xc429143950 } I0125 05:16:12.969305 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:16:13.449897 4678 audit.go:125] 2017-01-25T05:16:13.449864906-05:00 AUDIT: id="edd1e49e-65f6-43c5-9302-1c28b25f771c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:16:13.450309 4678 audit.go:45] 2017-01-25T05:16:13.450300381-05:00 AUDIT: id="edd1e49e-65f6-43c5-9302-1c28b25f771c" response="200" I0125 05:16:13.450584 4678 panics.go:76] GET 
/api/v1/nodes?resourceVersion=0: (929.483µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:13.450914 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:16:13.684596 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:14.010156 4678 gc_controller.go:175] GC'ing orphaned I0125 05:16:14.010175 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. I0125 05:16:15.380036 4678 audit.go:125] 2017-01-25T05:16:15.380001504-05:00 AUDIT: id="55299760-442b-46c7-9d25-ec5f3d42870d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:16:15.380956 4678 audit.go:45] 2017-01-25T05:16:15.380945736-05:00 AUDIT: id="55299760-442b-46c7-9d25-ec5f3d42870d" response="200" I0125 05:16:15.381041 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.219635ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:15.684597 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:17.027336 4678 audit.go:125] 2017-01-25T05:16:17.02728996-05:00 AUDIT: id="a46b20c6-712e-4ad5-a546-5b44050c0af8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps" I0125 05:16:17.027538 4678 audit.go:125] 2017-01-25T05:16:17.027510674-05:00 AUDIT: id="b273764e-749a-4a3b-8349-c34158ca3fdb" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps" I0125 05:16:17.028423 4678 audit.go:45] 2017-01-25T05:16:17.028412637-05:00 AUDIT: id="a46b20c6-712e-4ad5-a546-5b44050c0af8" response="200" I0125 05:16:17.028479 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps: (3.065926ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.028613 4678 audit.go:45] 2017-01-25T05:16:17.028604984-05:00 AUDIT: id="b273764e-749a-4a3b-8349-c34158ca3fdb" response="200" I0125 05:16:17.028645 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps: (1.358264ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:17.028915 4678 namespace_controller_utils.go:352] namespace controller - deleteAllContent - namespace: extended-test-postgresql-replication-0-bwll6-pnjps, gvrs: [{apps v1beta1 statefulsets} {autoscaling v1 horizontalpodautoscalers} {batch v1 jobs} {batch v2alpha1 cronjobs} {batch v2alpha1 scheduledjobs} {extensions v1beta1 daemonsets} {extensions v1beta1 deployments} {extensions v1beta1 horizontalpodautoscalers} {extensions v1beta1 ingresses} {extensions v1beta1 jobs} {extensions v1beta1 networkpolicies} {extensions v1beta1 replicasets} {extensions v1beta1 replicationcontrollers} {policy v1beta1 poddisruptionbudgets} { v1 bindings} { v1 configmaps} { v1 endpoints} { v1 events} { v1 limitranges} { v1 persistentvolumeclaims} { v1 serviceaccounts} { v1 podtemplates} { v1 replicationcontrollers} { v1 resourcequotas} { v1 secrets} { v1 services} { v1 pods}] I0125 05:16:17.030947 4678 audit.go:125] 
2017-01-25T05:16:17.030914419-05:00 AUDIT: id="c0ec46b5-c617-4701-96fd-6d6310c9a9c0" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/statefulsets" I0125 05:16:17.031910 4678 audit.go:45] 2017-01-25T05:16:17.031897895-05:00 AUDIT: id="c0ec46b5-c617-4701-96fd-6d6310c9a9c0" response="200" I0125 05:16:17.031966 4678 panics.go:76] DELETE /apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/statefulsets: (2.577429ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.033857 4678 audit.go:125] 2017-01-25T05:16:17.03383331-05:00 AUDIT: id="64158c2f-54e1-48cc-83a3-e08bb49c7451" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/statefulsets" I0125 05:16:17.034592 4678 audit.go:45] 2017-01-25T05:16:17.034582255-05:00 AUDIT: id="64158c2f-54e1-48cc-83a3-e08bb49c7451" response="200" I0125 05:16:17.034660 4678 panics.go:76] GET /apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/statefulsets: (2.270092ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.036745 4678 audit.go:125] 2017-01-25T05:16:17.036714832-05:00 AUDIT: id="03f171c5-4990-44f6-9e3a-58fe87304516" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers" I0125 05:16:17.037677 4678 audit.go:45] 2017-01-25T05:16:17.037667046-05:00 AUDIT: id="03f171c5-4990-44f6-9e3a-58fe87304516" response="200" I0125 05:16:17.037730 4678 panics.go:76] DELETE /apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers: (2.495904ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.039573 4678 audit.go:125] 2017-01-25T05:16:17.039550275-05:00 AUDIT: id="6896d6fd-c876-44b1-aed0-5f94c8814af6" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers" I0125 05:16:17.040251 4678 audit.go:45] 2017-01-25T05:16:17.040241399-05:00 AUDIT: id="6896d6fd-c876-44b1-aed0-5f94c8814af6" response="200" I0125 05:16:17.040302 4678 panics.go:76] GET /apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers: (2.143407ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.042285 4678 audit.go:125] 2017-01-25T05:16:17.042261838-05:00 AUDIT: id="962d802c-4f08-4cc0-8203-d6462215b522" ip="172.18.7.222" method="DELETE" 
user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/batch/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs" I0125 05:16:17.043135 4678 audit.go:45] 2017-01-25T05:16:17.043126115-05:00 AUDIT: id="962d802c-4f08-4cc0-8203-d6462215b522" response="200" I0125 05:16:17.043207 4678 panics.go:76] DELETE /apis/batch/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs: (2.312014ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.045065 4678 audit.go:125] 2017-01-25T05:16:17.045040872-05:00 AUDIT: id="18eeba7d-bf90-4860-99c8-b42bc464a986" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/batch/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs" I0125 05:16:17.045765 4678 audit.go:45] 2017-01-25T05:16:17.045756062-05:00 AUDIT: id="18eeba7d-bf90-4860-99c8-b42bc464a986" response="200" I0125 05:16:17.045814 4678 panics.go:76] GET /apis/batch/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs: (2.207974ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.047773 4678 audit.go:125] 2017-01-25T05:16:17.047750429-05:00 AUDIT: id="db6ad837-5b58-4174-9070-3ba30c55f46b" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/cronjobs" I0125 05:16:17.048618 4678 audit.go:45] 2017-01-25T05:16:17.048607944-05:00 AUDIT: id="db6ad837-5b58-4174-9070-3ba30c55f46b" response="200" I0125 05:16:17.048671 4678 panics.go:76] DELETE /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/cronjobs: (2.340973ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.050478 4678 audit.go:125] 2017-01-25T05:16:17.050454752-05:00 AUDIT: id="c1c3505d-7585-40f3-b41d-a2af6bce52e0" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/cronjobs" I0125 05:16:17.051219 4678 audit.go:45] 2017-01-25T05:16:17.051208881-05:00 AUDIT: id="c1c3505d-7585-40f3-b41d-a2af6bce52e0" response="200" I0125 05:16:17.051268 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/cronjobs: (2.192307ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.053148 4678 audit.go:125] 2017-01-25T05:16:17.053124032-05:00 AUDIT: id="7c382bd8-204e-426c-bd61-9ae01f97e6a0" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" 
uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/scheduledjobs" I0125 05:16:17.053996 4678 audit.go:45] 2017-01-25T05:16:17.053984928-05:00 AUDIT: id="7c382bd8-204e-426c-bd61-9ae01f97e6a0" response="200" I0125 05:16:17.054045 4678 panics.go:76] DELETE /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/scheduledjobs: (2.326823ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.055922 4678 audit.go:125] 2017-01-25T05:16:17.055899032-05:00 AUDIT: id="082dbb98-a2e7-469a-8fc8-0b394924dc7a" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/scheduledjobs" I0125 05:16:17.056604 4678 audit.go:45] 2017-01-25T05:16:17.056594907-05:00 AUDIT: id="082dbb98-a2e7-469a-8fc8-0b394924dc7a" response="200" I0125 05:16:17.056663 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/scheduledjobs: (2.200226ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.058638 4678 audit.go:125] 2017-01-25T05:16:17.058614242-05:00 AUDIT: id="904a530a-6f3e-4e6d-96eb-af190e43d8e8" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/daemonsets" I0125 05:16:17.059527 4678 audit.go:45] 2017-01-25T05:16:17.059517597-05:00 AUDIT: id="904a530a-6f3e-4e6d-96eb-af190e43d8e8" response="200" I0125 05:16:17.059577 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/daemonsets: (2.380516ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.061491 4678 audit.go:125] 2017-01-25T05:16:17.061468701-05:00 AUDIT: id="11c0a600-c556-4af2-b080-32190f834fb4" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/daemonsets" I0125 05:16:17.062213 4678 audit.go:45] 2017-01-25T05:16:17.062192163-05:00 AUDIT: id="11c0a600-c556-4af2-b080-32190f834fb4" response="200" I0125 05:16:17.062274 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/daemonsets: (2.298245ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.064242 4678 audit.go:125] 2017-01-25T05:16:17.064217524-05:00 AUDIT: id="418e0702-c395-472b-8475-8111c0a24753" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/deployments" I0125 05:16:17.065076 4678 audit.go:45] 
2017-01-25T05:16:17.065066294-05:00 AUDIT: id="418e0702-c395-472b-8475-8111c0a24753" response="200" I0125 05:16:17.065128 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/deployments: (2.327621ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.066966 4678 audit.go:125] 2017-01-25T05:16:17.066927931-05:00 AUDIT: id="d292802a-6fce-4516-9140-4d9afb11597f" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/deployments" I0125 05:16:17.067630 4678 audit.go:45] 2017-01-25T05:16:17.067620655-05:00 AUDIT: id="d292802a-6fce-4516-9140-4d9afb11597f" response="200" I0125 05:16:17.067679 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/deployments: (2.143069ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.069679 4678 audit.go:125] 2017-01-25T05:16:17.069655131-05:00 AUDIT: id="a2d5fcf5-e832-46ee-8b8e-cb8f75ebde86" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers" I0125 05:16:17.070567 4678 audit.go:45] 2017-01-25T05:16:17.070557355-05:00 AUDIT: id="a2d5fcf5-e832-46ee-8b8e-cb8f75ebde86" response="200" I0125 05:16:17.070619 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers: (2.353757ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.072340 4678 audit.go:125] 2017-01-25T05:16:17.072317533-05:00 AUDIT: id="bfade980-77d9-485c-9d18-f6f74fc3d804" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers" I0125 05:16:17.073070 4678 audit.go:45] 2017-01-25T05:16:17.073060621-05:00 AUDIT: id="bfade980-77d9-485c-9d18-f6f74fc3d804" response="200" I0125 05:16:17.073116 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers: (2.20158ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.075085 4678 audit.go:125] 2017-01-25T05:16:17.075062382-05:00 AUDIT: id="a0f99b98-c965-4224-b90f-19df04716326" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/ingresses" I0125 05:16:17.075950 4678 audit.go:45] 2017-01-25T05:16:17.075939909-05:00 AUDIT: id="a0f99b98-c965-4224-b90f-19df04716326" 
response="200" I0125 05:16:17.075999 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/ingresses: (2.309475ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.077892 4678 audit.go:125] 2017-01-25T05:16:17.077869604-05:00 AUDIT: id="b832d91f-4de5-44ca-a73f-bb4682dd4667" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/ingresses" I0125 05:16:17.078478 4678 audit.go:45] 2017-01-25T05:16:17.078468477-05:00 AUDIT: id="b832d91f-4de5-44ca-a73f-bb4682dd4667" response="200" I0125 05:16:17.078524 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/ingresses: (2.111069ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.080112 4678 audit.go:125] 2017-01-25T05:16:17.080088677-05:00 AUDIT: id="afbf4408-7259-45ae-88c3-f0553dfa72fb" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs" I0125 05:16:17.080990 4678 audit.go:45] 2017-01-25T05:16:17.080980761-05:00 AUDIT: id="afbf4408-7259-45ae-88c3-f0553dfa72fb" response="200" I0125 05:16:17.081041 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs: (2.082903ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.082926 4678 audit.go:125] 2017-01-25T05:16:17.082901741-05:00 AUDIT: id="80b7a9a6-58e8-4d5a-a311-5f67ac1f14d3" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs" I0125 05:16:17.083616 4678 audit.go:45] 2017-01-25T05:16:17.083606958-05:00 AUDIT: id="80b7a9a6-58e8-4d5a-a311-5f67ac1f14d3" response="200" I0125 05:16:17.083677 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs: (2.245292ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.085630 4678 audit.go:125] 2017-01-25T05:16:17.085607011-05:00 AUDIT: id="a109cd83-083b-46e4-afcf-83933519c2ac" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/networkpolicies" I0125 05:16:17.086437 4678 audit.go:45] 2017-01-25T05:16:17.086427439-05:00 AUDIT: id="a109cd83-083b-46e4-afcf-83933519c2ac" response="200" I0125 05:16:17.086490 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/networkpolicies: 
(2.2851ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.088257 4678 audit.go:125] 2017-01-25T05:16:17.088231385-05:00 AUDIT: id="ec599de5-d9b1-429d-a06c-c67fac3c6123" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/networkpolicies" I0125 05:16:17.088961 4678 audit.go:45] 2017-01-25T05:16:17.088951071-05:00 AUDIT: id="ec599de5-d9b1-429d-a06c-c67fac3c6123" response="200" I0125 05:16:17.089020 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/networkpolicies: (2.123567ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.090934 4678 audit.go:125] 2017-01-25T05:16:17.090908589-05:00 AUDIT: id="3e18ea90-3f6f-46c2-9615-301bb099bb91" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicasets" I0125 05:16:17.091824 4678 audit.go:45] 2017-01-25T05:16:17.091813894-05:00 AUDIT: id="3e18ea90-3f6f-46c2-9615-301bb099bb91" response="200" I0125 05:16:17.091876 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicasets: (2.294785ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.093388 4678 audit.go:125] 2017-01-25T05:16:17.093364169-05:00 AUDIT: id="ba586e5b-51c6-46c1-a2e9-a3b1e5ec6d3e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicasets" I0125 05:16:17.093951 4678 audit.go:45] 2017-01-25T05:16:17.093941721-05:00 AUDIT: id="ba586e5b-51c6-46c1-a2e9-a3b1e5ec6d3e" response="200" I0125 05:16:17.093997 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicasets: (1.82509ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.095982 4678 audit.go:125] 2017-01-25T05:16:17.095959141-05:00 AUDIT: id="6d0306c7-267b-46d1-9c8e-a528ae396d11" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/poddisruptionbudgets" I0125 05:16:17.096732 4678 audit.go:45] 2017-01-25T05:16:17.096722367-05:00 AUDIT: id="6d0306c7-267b-46d1-9c8e-a528ae396d11" response="200" I0125 05:16:17.096781 4678 panics.go:76] DELETE /apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/poddisruptionbudgets: (2.228496ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 
system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.098457 4678 audit.go:125] 2017-01-25T05:16:17.098433465-05:00 AUDIT: id="a2b78aeb-139e-4b1e-8a16-fd33f039c666" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/poddisruptionbudgets" I0125 05:16:17.099129 4678 audit.go:45] 2017-01-25T05:16:17.09912016-05:00 AUDIT: id="a2b78aeb-139e-4b1e-8a16-fd33f039c666" response="200" I0125 05:16:17.099176 4678 panics.go:76] GET /apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/poddisruptionbudgets: (2.098013ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.101264 4678 audit.go:125] 2017-01-25T05:16:17.101239124-05:00 AUDIT: id="b51f5d41-2034-4e55-81eb-b64d55037f84" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/configmaps" I0125 05:16:17.102225 4678 audit.go:45] 2017-01-25T05:16:17.102214293-05:00 AUDIT: id="b51f5d41-2034-4e55-81eb-b64d55037f84" response="200" I0125 05:16:17.102273 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/configmaps: (2.472471ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.104094 4678 audit.go:125] 2017-01-25T05:16:17.104068541-05:00 AUDIT: id="d19490e6-88ba-451f-ad95-f4a842f40543" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/configmaps" I0125 05:16:17.104760 4678 audit.go:45] 2017-01-25T05:16:17.10475041-05:00 AUDIT: id="d19490e6-88ba-451f-ad95-f4a842f40543" response="200" I0125 05:16:17.104808 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/configmaps: (2.143315ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.106785 4678 audit.go:125] 2017-01-25T05:16:17.10676162-05:00 AUDIT: id="c97fa42f-5c6d-4c54-8aff-3c21b25ac40d" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/endpoints" I0125 05:16:17.107681 4678 audit.go:45] 2017-01-25T05:16:17.107672093-05:00 AUDIT: id="c97fa42f-5c6d-4c54-8aff-3c21b25ac40d" response="200" I0125 05:16:17.107736 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/endpoints: (2.416785ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.109723 4678 audit.go:125] 2017-01-25T05:16:17.109691915-05:00 AUDIT: id="2c8cba1c-9e53-40c8-9535-b4cf5d027f91" ip="172.18.7.222" method="GET" 
user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/endpoints" I0125 05:16:17.110459 4678 audit.go:45] 2017-01-25T05:16:17.110444329-05:00 AUDIT: id="2c8cba1c-9e53-40c8-9535-b4cf5d027f91" response="200" I0125 05:16:17.110506 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/endpoints: (2.423898ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.112487 4678 audit.go:125] 2017-01-25T05:16:17.112463855-05:00 AUDIT: id="322d5216-70f7-4503-b3ea-0b8833575926" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/events" I0125 05:16:17.113373 4678 audit.go:45] 2017-01-25T05:16:17.113364078-05:00 AUDIT: id="322d5216-70f7-4503-b3ea-0b8833575926" response="200" I0125 05:16:17.113422 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/events: (2.369393ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.115162 4678 audit.go:125] 2017-01-25T05:16:17.115139263-05:00 AUDIT: id="f064e68a-83b5-4955-9eba-6af0f0fc4598" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/events" I0125 05:16:17.115856 4678 audit.go:45] 2017-01-25T05:16:17.115846006-05:00 AUDIT: id="f064e68a-83b5-4955-9eba-6af0f0fc4598" response="200" I0125 05:16:17.115901 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/events: (2.113121ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.117811 4678 audit.go:125] 2017-01-25T05:16:17.117786226-05:00 AUDIT: id="bfcf9f67-16b7-4e5c-9cb8-da748377bced" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/limitranges" I0125 05:16:17.118653 4678 audit.go:45] 2017-01-25T05:16:17.118643089-05:00 AUDIT: id="bfcf9f67-16b7-4e5c-9cb8-da748377bced" response="200" I0125 05:16:17.118702 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/limitranges: (2.257008ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.120611 4678 audit.go:125] 2017-01-25T05:16:17.120586771-05:00 AUDIT: id="5f537a09-17ac-4ba5-8b12-b85c84de4d56" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/limitranges" I0125 05:16:17.121295 4678 audit.go:45] 
2017-01-25T05:16:17.121285667-05:00 AUDIT: id="5f537a09-17ac-4ba5-8b12-b85c84de4d56" response="200" I0125 05:16:17.121342 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/limitranges: (2.260275ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.123255 4678 audit.go:125] 2017-01-25T05:16:17.123231782-05:00 AUDIT: id="7641a8da-accf-4d32-9264-1608599bcac4" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/persistentvolumeclaims" I0125 05:16:17.124139 4678 audit.go:45] 2017-01-25T05:16:17.124129601-05:00 AUDIT: id="7641a8da-accf-4d32-9264-1608599bcac4" response="200" I0125 05:16:17.124189 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/persistentvolumeclaims: (2.362419ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.125994 4678 audit.go:125] 2017-01-25T05:16:17.125971656-05:00 AUDIT: id="2b216bd9-deb1-48bc-9d61-67136f9b800b" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/persistentvolumeclaims" I0125 05:16:17.126706 4678 audit.go:45] 2017-01-25T05:16:17.126696851-05:00 AUDIT: id="2b216bd9-deb1-48bc-9d61-67136f9b800b" response="200" I0125 05:16:17.126753 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/persistentvolumeclaims: (2.152837ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.128744 4678 audit.go:125] 2017-01-25T05:16:17.128717732-05:00 AUDIT: id="8194ea3f-650a-40ca-99b7-ae47466594b4" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts" I0125 05:16:17.132168 4678 tokens_controller.go:265] syncServiceAccount(extended-test-postgresql-replication-0-bwll6-pnjps/builder), service account deleted, removing tokens I0125 05:16:17.132619 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (2.229µs) I0125 05:16:17.132913 4678 audit.go:125] 2017-01-25T05:16:17.132881179-05:00 AUDIT: id="389f2408-48a8-4a69-9dfc-26388d6333c5" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-4b44x" I0125 05:16:17.134926 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (2.059µs) I0125 05:16:17.134993 4678 tokens_controller.go:265] syncServiceAccount(extended-test-postgresql-replication-0-bwll6-pnjps/default), service account deleted, removing tokens I0125 05:16:17.135708 4678 audit.go:125] 
2017-01-25T05:16:17.135673989-05:00 AUDIT: id="119924e5-a5ee-4ac1-a74c-287cc8758e81" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-fbtw9" I0125 05:16:17.136702 4678 audit.go:45] 2017-01-25T05:16:17.13668806-05:00 AUDIT: id="389f2408-48a8-4a69-9dfc-26388d6333c5" response="200" I0125 05:16:17.136749 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-4b44x: (4.110328ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.137905 4678 audit.go:45] 2017-01-25T05:16:17.137892015-05:00 AUDIT: id="8194ea3f-650a-40ca-99b7-ae47466594b4" response="200" I0125 05:16:17.138024 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts: (10.690576ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.138584 4678 tokens_controller.go:265] syncServiceAccount(extended-test-postgresql-replication-0-bwll6-pnjps/deployer), service account deleted, removing tokens I0125 05:16:17.140076 4678 audit.go:125] 2017-01-25T05:16:17.140044152-05:00 AUDIT: id="d99aa869-222e-4da6-b70c-b8022c72b4ee" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-token-rrz3b" I0125 05:16:17.140539 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (1.877µs) I0125 05:16:17.140796 4678 audit.go:125] 2017-01-25T05:16:17.140764201-05:00 AUDIT: id="5e994ad6-17ed-479f-aa74-aa20bb520dbc" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-hc971" I0125 05:16:17.141679 4678 audit.go:125] 2017-01-25T05:16:17.14164103-05:00 AUDIT: id="60aa3e08-952e-498a-955c-1fb17ad003ac" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:17.142539 4678 audit.go:45] 2017-01-25T05:16:17.142525834-05:00 AUDIT: id="119924e5-a5ee-4ac1-a74c-287cc8758e81" response="200" I0125 05:16:17.142581 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-fbtw9: (7.139151ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.143866 4678 audit.go:45] 2017-01-25T05:16:17.143856657-05:00 AUDIT: id="60aa3e08-952e-498a-955c-1fb17ad003ac" response="200" I0125 05:16:17.144746 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (3.334275ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.145320 4678 audit.go:125] 2017-01-25T05:16:17.145285836-05:00 AUDIT: id="308ce5a3-3228-447c-833d-19e3a4006fee" 
ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts" I0125 05:16:17.146432 4678 audit.go:125] 2017-01-25T05:16:17.146398233-05:00 AUDIT: id="d0e4c561-920a-40f5-821d-de5d71f98622" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-pnq00" I0125 05:16:17.146976 4678 audit.go:45] 2017-01-25T05:16:17.14696329-05:00 AUDIT: id="308ce5a3-3228-447c-833d-19e3a4006fee" response="200" I0125 05:16:17.147039 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts: (7.956836ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.147266 4678 audit.go:125] 2017-01-25T05:16:17.147232611-05:00 AUDIT: id="2c548996-f2aa-427d-a957-b714151f1819" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:17.149351 4678 audit.go:45] 2017-01-25T05:16:17.149337517-05:00 AUDIT: id="d99aa869-222e-4da6-b70c-b8022c72b4ee" response="200" I0125 05:16:17.149394 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-token-rrz3b: (9.567477ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.149625 4678 audit.go:45] 2017-01-25T05:16:17.149611711-05:00 AUDIT: id="5e994ad6-17ed-479f-aa74-aa20bb520dbc" response="200" I0125 05:16:17.149668 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-hc971: (9.142185ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.149881 4678 audit.go:45] 2017-01-25T05:16:17.149868232-05:00 AUDIT: id="2c548996-f2aa-427d-a957-b714151f1819" response="200" I0125 05:16:17.151022 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (4.031127ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.151731 4678 audit.go:125] 2017-01-25T05:16:17.151704489-05:00 AUDIT: id="cde1aaf2-22ee-4f5b-b423-0e7c02f30fb4" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/podtemplates" I0125 05:16:17.152701 4678 audit.go:125] 2017-01-25T05:16:17.152668476-05:00 AUDIT: id="198727aa-78d2-4640-9369-5e7c326e8506" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-token-zbs7r" I0125 05:16:17.153643 4678 audit.go:125] 2017-01-25T05:16:17.153610965-05:00 AUDIT: id="e9a536ae-e891-4102-813f-3c9a248693bd" 
ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-dockercfg-x02dh" I0125 05:16:17.155713 4678 audit.go:45] 2017-01-25T05:16:17.155696575-05:00 AUDIT: id="d0e4c561-920a-40f5-821d-de5d71f98622" response="200" I0125 05:16:17.155756 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-pnq00: (9.598266ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.155975 4678 audit.go:45] 2017-01-25T05:16:17.155962292-05:00 AUDIT: id="cde1aaf2-22ee-4f5b-b423-0e7c02f30fb4" response="200" I0125 05:16:17.156047 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/podtemplates: (7.90856ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.158509 4678 audit.go:45] 2017-01-25T05:16:17.158495796-05:00 AUDIT: id="198727aa-78d2-4640-9369-5e7c326e8506" response="200" I0125 05:16:17.158552 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-token-zbs7r: (6.116767ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.159046 4678 audit.go:125] 2017-01-25T05:16:17.159012001-05:00 AUDIT: id="5635de63-0944-42fa-a936-7191b86276f8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/podtemplates" I0125 05:16:17.159999 4678 audit.go:45] 2017-01-25T05:16:17.159984392-05:00 AUDIT: id="e9a536ae-e891-4102-813f-3c9a248693bd" response="200" I0125 05:16:17.160039 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-dockercfg-x02dh: (6.652661ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.160067 4678 audit.go:45] 2017-01-25T05:16:17.160055873-05:00 AUDIT: id="5635de63-0944-42fa-a936-7191b86276f8" response="200" I0125 05:16:17.160135 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/podtemplates: (3.629525ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.162078 4678 audit.go:125] 2017-01-25T05:16:17.162045222-05:00 AUDIT: id="1240c52f-ab99-4190-a230-073b5c24596f" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default" I0125 05:16:17.162707 4678 audit.go:125] 2017-01-25T05:16:17.162672754-05:00 AUDIT: id="3e7642ee-6eb7-4681-b675-a5ec44f8bb71" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:17.162817 4678 audit.go:125] 2017-01-25T05:16:17.162779641-05:00 AUDIT: 
id="d7358478-6e7d-4827-9b29-e30b5b0641a7" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicationcontrollers" I0125 05:16:17.163562 4678 audit.go:45] 2017-01-25T05:16:17.163550268-05:00 AUDIT: id="1240c52f-ab99-4190-a230-073b5c24596f" response="404" I0125 05:16:17.163607 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/default: (1.79562ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.164360 4678 audit.go:125] 2017-01-25T05:16:17.164325884-05:00 AUDIT: id="b1a68beb-ca80-496b-988c-eb753e6e73f1" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-fbtw9" I0125 05:16:17.164491 4678 audit.go:45] 2017-01-25T05:16:17.164478244-05:00 AUDIT: id="3e7642ee-6eb7-4681-b675-a5ec44f8bb71" response="200" I0125 05:16:17.164690 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.251864ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.164806 4678 audit.go:45] 2017-01-25T05:16:17.164792627-05:00 AUDIT: id="d7358478-6e7d-4827-9b29-e30b5b0641a7" response="200" I0125 05:16:17.164871 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicationcontrollers: (3.866689ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.165325 4678 audit.go:45] 2017-01-25T05:16:17.165312583-05:00 AUDIT: id="b1a68beb-ca80-496b-988c-eb753e6e73f1" response="404" I0125 05:16:17.165369 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/default-token-fbtw9: (1.299353ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.165689 4678 audit.go:125] 2017-01-25T05:16:17.165656859-05:00 AUDIT: id="417392b7-e2e0-4960-9137-b4ba9118f41c" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-dockercfg-w421d" I0125 05:16:17.167448 4678 audit.go:125] 2017-01-25T05:16:17.167414511-05:00 AUDIT: id="9037533b-2e44-4127-ae4e-680ea4fa07e0" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicationcontrollers" I0125 05:16:17.168266 4678 audit.go:45] 2017-01-25T05:16:17.168252246-05:00 AUDIT: id="9037533b-2e44-4127-ae4e-680ea4fa07e0" response="200" I0125 05:16:17.168331 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicationcontrollers: (2.541336ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.168377 4678 
audit.go:45] 2017-01-25T05:16:17.168364664-05:00 AUDIT: id="417392b7-e2e0-4960-9137-b4ba9118f41c" response="200" I0125 05:16:17.168416 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-dockercfg-w421d: (2.988508ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.170368 4678 audit.go:125] 2017-01-25T05:16:17.170334363-05:00 AUDIT: id="6f16f73f-cb84-4607-b51b-921c096a2725" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:17.170906 4678 audit.go:125] 2017-01-25T05:16:17.170884977-05:00 AUDIT: id="40fc68d0-9025-4d5e-8a78-5fe381ba8515" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer" I0125 05:16:17.171532 4678 audit.go:125] 2017-01-25T05:16:17.17150075-05:00 AUDIT: id="1914e5e1-a07b-4a70-967e-36897471e5fe" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/resourcequotas" I0125 05:16:17.172114 4678 audit.go:45] 2017-01-25T05:16:17.172101583-05:00 AUDIT: id="40fc68d0-9025-4d5e-8a78-5fe381ba8515" response="404" I0125 05:16:17.172163 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/deployer: (1.423229ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.172286 4678 audit.go:45] 2017-01-25T05:16:17.172270606-05:00 AUDIT: id="6f16f73f-cb84-4607-b51b-921c096a2725" response="200" I0125 05:16:17.172454 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.379666ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.172793 4678 audit.go:45] 2017-01-25T05:16:17.172783771-05:00 AUDIT: id="1914e5e1-a07b-4a70-967e-36897471e5fe" response="200" I0125 05:16:17.172854 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/resourcequotas: (3.840279ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.173631 4678 audit.go:125] 2017-01-25T05:16:17.173599642-05:00 AUDIT: id="1093eb0f-d5b7-4450-8e41-ef3ccf698050" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-dockercfg-v9shh" I0125 05:16:17.174322 4678 audit.go:125] 2017-01-25T05:16:17.174286925-05:00 AUDIT: id="43057787-8234-49a9-874e-3fe4270c597c" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-token-rrz3b" I0125 05:16:17.175356 4678 audit.go:45] 
2017-01-25T05:16:17.17534352-05:00 AUDIT: id="43057787-8234-49a9-874e-3fe4270c597c" response="404" I0125 05:16:17.175397 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/deployer-token-rrz3b: (1.35478ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.175918 4678 audit.go:125] 2017-01-25T05:16:17.175880705-05:00 AUDIT: id="5b2cce78-1089-46f3-b47c-8be5f0ca7d53" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/resourcequotas" I0125 05:16:17.176698 4678 audit.go:45] 2017-01-25T05:16:17.176685302-05:00 AUDIT: id="1093eb0f-d5b7-4450-8e41-ef3ccf698050" response="200" I0125 05:16:17.176740 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-dockercfg-v9shh: (3.365459ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.176802 4678 audit.go:45] 2017-01-25T05:16:17.17678742-05:00 AUDIT: id="5b2cce78-1089-46f3-b47c-8be5f0ca7d53" response="200" I0125 05:16:17.176861 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/resourcequotas: (3.565059ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.178622 4678 audit.go:125] 2017-01-25T05:16:17.178581354-05:00 AUDIT: id="01248068-5913-440c-948b-63ac17a0d427" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:17.179297 4678 audit.go:125] 2017-01-25T05:16:17.179265196-05:00 AUDIT: id="573e875c-4585-415c-884d-fe1fb9e4372b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder" I0125 05:16:17.179498 4678 audit.go:125] 2017-01-25T05:16:17.179464607-05:00 AUDIT: id="8ce1a302-591a-4a2e-a158-664649c6d4c9" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets" I0125 05:16:17.180492 4678 audit.go:45] 2017-01-25T05:16:17.180479496-05:00 AUDIT: id="573e875c-4585-415c-884d-fe1fb9e4372b" response="404" I0125 05:16:17.180534 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts/builder: (1.522288ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.180698 4678 audit.go:45] 2017-01-25T05:16:17.180685369-05:00 AUDIT: id="01248068-5913-440c-948b-63ac17a0d427" response="200" I0125 05:16:17.180741 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.388922ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.180858 4678 audit.go:45] 
2017-01-25T05:16:17.180847208-05:00 AUDIT: id="8ce1a302-591a-4a2e-a158-664649c6d4c9" response="200" I0125 05:16:17.180907 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (3.139527ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.181243 4678 audit.go:125] 2017-01-25T05:16:17.181191856-05:00 AUDIT: id="6e94be46-7ebb-4269-b69b-9a4c10ad1cc2" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-hc971" I0125 05:16:17.181669 4678 audit.go:125] 2017-01-25T05:16:17.181635291-05:00 AUDIT: id="208bc076-ef49-4819-a8b5-e93ae36433a9" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:17.182559 4678 audit.go:45] 2017-01-25T05:16:17.182550846-05:00 AUDIT: id="6e94be46-7ebb-4269-b69b-9a4c10ad1cc2" response="404" I0125 05:16:17.182590 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets/builder-token-hc971: (1.636555ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.183472 4678 audit.go:45] 2017-01-25T05:16:17.183459355-05:00 AUDIT: id="208bc076-ef49-4819-a8b5-e93ae36433a9" response="200" I0125 05:16:17.183517 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.107492ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:17.183953 4678 audit.go:125] 2017-01-25T05:16:17.183930289-05:00 AUDIT: id="f7985b51-50bc-447b-b81f-5a4b4d08f7f4" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets" I0125 05:16:17.184649 4678 audit.go:45] 2017-01-25T05:16:17.184639099-05:00 AUDIT: id="f7985b51-50bc-447b-b81f-5a4b4d08f7f4" response="200" I0125 05:16:17.184692 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (2.8482ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.186609 4678 audit.go:125] 2017-01-25T05:16:17.186586886-05:00 AUDIT: id="30db3679-8ed1-45a1-a4ee-8f9f93e21bdd" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/services" I0125 05:16:17.187384 4678 audit.go:45] 2017-01-25T05:16:17.187374421-05:00 AUDIT: id="30db3679-8ed1-45a1-a4ee-8f9f93e21bdd" response="200" I0125 05:16:17.187435 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/services: (2.253332ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 
172.18.7.222:50846] I0125 05:16:17.189221 4678 audit.go:125] 2017-01-25T05:16:17.189185158-05:00 AUDIT: id="fb8a7982-65d0-42cb-ab2b-0bd2e2ed06d2" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/services" I0125 05:16:17.189987 4678 audit.go:45] 2017-01-25T05:16:17.189977485-05:00 AUDIT: id="fb8a7982-65d0-42cb-ab2b-0bd2e2ed06d2" response="200" I0125 05:16:17.190030 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/services: (2.211328ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.191839 4678 audit.go:125] 2017-01-25T05:16:17.191816652-05:00 AUDIT: id="92db3166-def4-4b19-9bc8-c79ebd72f5d4" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods" I0125 05:16:17.192593 4678 audit.go:45] 2017-01-25T05:16:17.192583547-05:00 AUDIT: id="92db3166-def4-4b19-9bc8-c79ebd72f5d4" response="200" I0125 05:16:17.192646 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods: (2.169919ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.194373 4678 audit.go:125] 2017-01-25T05:16:17.194348932-05:00 AUDIT: id="14df19c4-9617-4e82-b83b-8d0892d4c6b5" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods" I0125 05:16:17.195235 4678 audit.go:45] 2017-01-25T05:16:17.195225574-05:00 AUDIT: id="14df19c4-9617-4e82-b83b-8d0892d4c6b5" response="200" I0125 05:16:17.195295 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods: (2.131495ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.197101 4678 audit.go:125] 2017-01-25T05:16:17.197077556-05:00 AUDIT: id="13be5580-01f8-48e8-b40b-15324803dba1" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods" I0125 05:16:17.197826 4678 audit.go:45] 2017-01-25T05:16:17.197812594-05:00 AUDIT: id="13be5580-01f8-48e8-b40b-15324803dba1" response="200" I0125 05:16:17.197902 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods: (2.224264ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.198125 4678 namespace_controller_utils.go:365] namespace controller - deleteAllContent - namespace: extended-test-postgresql-replication-0-bwll6-pnjps, estimate: 0 I0125 05:16:17.199863 4678 audit.go:125] 2017-01-25T05:16:17.199840545-05:00 AUDIT: id="c2e27286-738e-4cc7-b191-55496f6bc72b" 
ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/finalize" I0125 05:16:17.201023 4678 audit.go:45] 2017-01-25T05:16:17.201009451-05:00 AUDIT: id="c2e27286-738e-4cc7-b191-55496f6bc72b" response="200" I0125 05:16:17.201071 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/finalize: (2.62581ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.201638 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (1.882µs) I0125 05:16:17.203156 4678 audit.go:125] 2017-01-25T05:16:17.203131845-05:00 AUDIT: id="1b0e6547-ef86-4f4b-9e6d-1dc64b220fb2" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps" I0125 05:16:17.205853 4678 audit.go:45] 2017-01-25T05:16:17.205843889-05:00 AUDIT: id="1b0e6547-ef86-4f4b-9e6d-1dc64b220fb2" response="200" I0125 05:16:17.205891 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps: (4.218928ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:17.206305 4678 namespace_controller.go:206] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (443ns) I0125 05:16:17.684596 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:18.110043 4678 helpers.go:101] Unable to get network stats from pid 10710: couldn't read network stats: failure opening /proc/10710/net/dev: open /proc/10710/net/dev: no such file or directory I0125 05:16:18.451881 4678 audit.go:125] 2017-01-25T05:16:18.451839428-05:00 AUDIT: id="9b77d250-2a4f-4352-9ec8-9431efc948c9" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:16:18.452320 4678 audit.go:45] 2017-01-25T05:16:18.452310574-05:00 AUDIT: id="9b77d250-2a4f-4352-9ec8-9431efc948c9" response="200" I0125 05:16:18.452644 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.021314ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:19.137740 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:16:19.137914 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:16:19.138096 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:16:19.143112 4678 audit.go:125] 2017-01-25T05:16:19.143073364-05:00 AUDIT: id="3394f051-2da5-49b6-923d-ce8a38621587" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:16:19.143540 4678 audit.go:45] 2017-01-25T05:16:19.143522321-05:00 AUDIT: id="3394f051-2da5-49b6-923d-ce8a38621587" response="200" I0125 05:16:19.143869 4678 panics.go:76] GET 
/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (1.004615ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:19.209590 4678 audit.go:125] 2017-01-25T05:16:19.209560403-05:00 AUDIT: id="5960d893-79e4-47ed-974b-dff131fe81a8" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:16:19.211807 4678 audit.go:45] 2017-01-25T05:16:19.211792591-05:00 AUDIT: id="5960d893-79e4-47ed-974b-dff131fe81a8" response="200" I0125 05:16:19.212031 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.644173ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:19.212887 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:16:19.260988 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:16:19.261014 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:19.261678 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:16:19.261695 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:19.262165 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc438083bc0 -1 [] true false map[] 0xc42dd9ec30 } I0125 05:16:19.262221 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:16:19.262348 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc438083ca0 -1 [] true false map[] 0xc42beb1e00 } I0125 05:16:19.262370 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:16:19.338447 4678 audit.go:125] 2017-01-25T05:16:19.338413044-05:00 AUDIT: id="94d7da5c-b5b8-42f9-bc64-9854572885b5" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:16:19.339326 4678 audit.go:45] 2017-01-25T05:16:19.339315858-05:00 AUDIT: id="94d7da5c-b5b8-42f9-bc64-9854572885b5" response="200" I0125 05:16:19.339416 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.913743ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:16:19.339666 4678 controller.go:106] Found 0 cronjobs I0125 05:16:19.341421 4678 audit.go:125] 2017-01-25T05:16:19.34140159-05:00 AUDIT: id="a4da8c59-a5ce-441d-9c5f-17d1656fe130" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:16:19.342146 4678 audit.go:45] 2017-01-25T05:16:19.342136795-05:00 AUDIT: id="a4da8c59-a5ce-441d-9c5f-17d1656fe130" response="200" I0125 05:16:19.342206 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.311366ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:16:19.342409 4678 controller.go:114] Found 0 jobs I0125 05:16:19.342419 4678 controller.go:117] Found 0 groups I0125 05:16:19.419694 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:16:19.419711 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:16:19.535103 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-helper-1-cpv6d I0125 05:16:19.535165 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:16:19.684595 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:20.000052 4678 helpers.go:101] Unable to get network stats from pid 12728: couldn't read network stats: failure opening /proc/12728/net/dev: open /proc/12728/net/dev: no such file or directory I0125 05:16:20.264263 4678 panics.go:76] GET /api/v1/watch/namespaces?resourceVersion=10540&timeoutSeconds=329: (5m29.001077575s) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:20.264480 4678 reflector.go:392] github.com/openshift/origin/pkg/project/cache/cache.go:107: Watch close - *api.Namespace total 14 items received I0125 05:16:20.265087 4678 audit.go:125] 2017-01-25T05:16:20.26504602-05:00 AUDIT: id="e228e5f4-c54c-4789-a09e-498d0030f13c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/watch/namespaces?resourceVersion=11286&timeoutSeconds=441" I0125 05:16:20.265539 4678 audit.go:45] 2017-01-25T05:16:20.265529177-05:00 AUDIT: id="e228e5f4-c54c-4789-a09e-498d0030f13c" response="200" I0125 05:16:21.684596 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:21.976403 4678 audit.go:125] 2017-01-25T05:16:21.97636163-05:00 AUDIT: id="cefb5880-3365-4585-9c23-2434dd3e8853" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:16:21.977555 4678 audit.go:45] 2017-01-25T05:16:21.977543803-05:00 AUDIT: id="cefb5880-3365-4585-9c23-2434dd3e8853" response="200" I0125 05:16:21.977641 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.503361ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:22.027740 4678 audit.go:125] 2017-01-25T05:16:22.027700523-05:00 AUDIT: id="93c51ffe-319a-404f-9700-6913f5c7c47e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps" I0125 05:16:22.028781 4678 audit.go:45] 2017-01-25T05:16:22.028771137-05:00 AUDIT: id="93c51ffe-319a-404f-9700-6913f5c7c47e" response="404" I0125 05:16:22.028851 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps: (1.358264ms) 404 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.029526 4678 audit.go:125] 2017-01-25T05:16:22.029507757-05:00 AUDIT: id="bf2920f7-6202-4190-86fd-c0dd6e8900a3" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api" I0125 05:16:22.029644 4678 audit.go:45] 2017-01-25T05:16:22.029630437-05:00 AUDIT: id="bf2920f7-6202-4190-86fd-c0dd6e8900a3" response="200" I0125 05:16:22.029687 4678 panics.go:76] GET /api: (356.248µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.030253 4678 audit.go:125] 2017-01-25T05:16:22.030232502-05:00 AUDIT: id="420ed822-1079-4033-aa2b-a7bc6e9d3fd1" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" 
namespace="" uri="/apis" I0125 05:16:22.030378 4678 audit.go:45] 2017-01-25T05:16:22.030370092-05:00 AUDIT: id="420ed822-1079-4033-aa2b-a7bc6e9d3fd1" response="200" I0125 05:16:22.030434 4678 panics.go:76] GET /apis: (371.821µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.031045 4678 audit.go:125] 2017-01-25T05:16:22.031026339-05:00 AUDIT: id="06c97d43-ce03-4856-8cb2-4261d20b49da" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/apps/v1beta1" I0125 05:16:22.031149 4678 audit.go:45] 2017-01-25T05:16:22.031142464-05:00 AUDIT: id="06c97d43-ce03-4856-8cb2-4261d20b49da" response="200" I0125 05:16:22.031184 4678 panics.go:76] GET /apis/apps/v1beta1: (338.015µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.031794 4678 audit.go:125] 2017-01-25T05:16:22.031775425-05:00 AUDIT: id="0278fc63-16a1-45a1-a382-26fb9427d280" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/authentication.k8s.io/v1beta1" I0125 05:16:22.031892 4678 audit.go:45] 2017-01-25T05:16:22.031885542-05:00 AUDIT: id="0278fc63-16a1-45a1-a382-26fb9427d280" response="200" I0125 05:16:22.031915 4678 panics.go:76] GET /apis/authentication.k8s.io/v1beta1: (309.603µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.032467 4678 audit.go:125] 2017-01-25T05:16:22.032449363-05:00 AUDIT: id="e73189ba-cd6b-4a65-8a05-0c19c1b02d65" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/autoscaling/v1" I0125 05:16:22.032567 4678 audit.go:45] 2017-01-25T05:16:22.032559935-05:00 AUDIT: id="e73189ba-cd6b-4a65-8a05-0c19c1b02d65" response="200" I0125 05:16:22.032589 4678 panics.go:76] GET /apis/autoscaling/v1: (304.168µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.033136 4678 audit.go:125] 2017-01-25T05:16:22.033118931-05:00 AUDIT: id="2259d672-a401-4f12-bf0c-4cafdb6f55ca" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/batch/v1" I0125 05:16:22.033248 4678 audit.go:45] 2017-01-25T05:16:22.033241242-05:00 AUDIT: id="2259d672-a401-4f12-bf0c-4cafdb6f55ca" response="200" I0125 05:16:22.033271 4678 panics.go:76] GET /apis/batch/v1: (312.315µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.033819 4678 audit.go:125] 2017-01-25T05:16:22.033802223-05:00 AUDIT: id="19982f5c-cf0c-4a22-8131-f9da9946c991" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1" I0125 05:16:22.033928 4678 audit.go:45] 2017-01-25T05:16:22.033921302-05:00 AUDIT: id="19982f5c-cf0c-4a22-8131-f9da9946c991" response="200" I0125 05:16:22.033951 4678 panics.go:76] GET /apis/batch/v2alpha1: (312.359µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.034561 4678 audit.go:125] 2017-01-25T05:16:22.034542037-05:00 AUDIT: id="01901891-163d-4b7b-a8ba-8e395b0b2116" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/certificates.k8s.io/v1alpha1" I0125 05:16:22.034654 4678 audit.go:45] 2017-01-25T05:16:22.034646886-05:00 AUDIT: id="01901891-163d-4b7b-a8ba-8e395b0b2116" response="200" I0125 05:16:22.034677 4678 panics.go:76] GET /apis/certificates.k8s.io/v1alpha1: (306.542µs) 200 
[[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.035260 4678 audit.go:125] 2017-01-25T05:16:22.035240348-05:00 AUDIT: id="46be94d7-1d83-4b6d-b52b-47b6bf295d47" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1" I0125 05:16:22.035418 4678 audit.go:45] 2017-01-25T05:16:22.035410049-05:00 AUDIT: id="46be94d7-1d83-4b6d-b52b-47b6bf295d47" response="200" I0125 05:16:22.035494 4678 panics.go:76] GET /apis/extensions/v1beta1: (418.794µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.036064 4678 audit.go:125] 2017-01-25T05:16:22.036044486-05:00 AUDIT: id="e477a99a-f163-4e4e-be62-fbd962e9a9d5" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/policy/v1beta1" I0125 05:16:22.036181 4678 audit.go:45] 2017-01-25T05:16:22.036173034-05:00 AUDIT: id="e477a99a-f163-4e4e-be62-fbd962e9a9d5" response="200" I0125 05:16:22.036220 4678 panics.go:76] GET /apis/policy/v1beta1: (338.362µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.036770 4678 audit.go:125] 2017-01-25T05:16:22.036752067-05:00 AUDIT: id="e9060ece-386c-4110-add7-1c4ba65db715" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/storage.k8s.io/v1beta1" I0125 05:16:22.036884 4678 audit.go:45] 2017-01-25T05:16:22.036876386-05:00 AUDIT: id="e9060ece-386c-4110-add7-1c4ba65db715" response="200" I0125 05:16:22.036909 4678 panics.go:76] GET /apis/storage.k8s.io/v1beta1: (324.111µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.037459 4678 audit.go:125] 2017-01-25T05:16:22.037441694-05:00 AUDIT: id="34abeb56-4214-4deb-940b-70bf68f41caf" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1" I0125 05:16:22.037586 4678 audit.go:45] 2017-01-25T05:16:22.0375792-05:00 AUDIT: id="34abeb56-4214-4deb-940b-70bf68f41caf" response="200" I0125 05:16:22.037647 4678 panics.go:76] GET /api/v1: (379.534µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.038444 4678 audit.go:125] 2017-01-25T05:16:22.038414769-05:00 AUDIT: id="b5a391bc-e0f3-444d-b327-abab75f9a2dc" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/statefulsets" I0125 05:16:22.039247 4678 audit.go:45] 2017-01-25T05:16:22.039237107-05:00 AUDIT: id="b5a391bc-e0f3-444d-b327-abab75f9a2dc" response="200" I0125 05:16:22.039345 4678 panics.go:76] GET /apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/statefulsets: (1.107136ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.040074 4678 audit.go:125] 2017-01-25T05:16:22.040051256-05:00 AUDIT: id="0b14049c-4699-43bf-b4a6-4317d4474348" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers" I0125 05:16:22.040902 4678 audit.go:45] 2017-01-25T05:16:22.0408921-05:00 AUDIT: id="0b14049c-4699-43bf-b4a6-4317d4474348" response="200" I0125 05:16:22.040957 4678 panics.go:76] GET 
/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers: (1.083015ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.041702 4678 audit.go:125] 2017-01-25T05:16:22.041679014-05:00 AUDIT: id="9048756b-692f-40f3-b8a9-3f013cc2b7fd" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/batch/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs" I0125 05:16:22.042489 4678 audit.go:45] 2017-01-25T05:16:22.042479771-05:00 AUDIT: id="9048756b-692f-40f3-b8a9-3f013cc2b7fd" response="200" I0125 05:16:22.042541 4678 panics.go:76] GET /apis/batch/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs: (1.049314ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.043269 4678 audit.go:125] 2017-01-25T05:16:22.043247232-05:00 AUDIT: id="7a20be78-f11a-490d-be1c-132248599f7a" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/cronjobs" I0125 05:16:22.043963 4678 audit.go:45] 2017-01-25T05:16:22.043953884-05:00 AUDIT: id="7a20be78-f11a-490d-be1c-132248599f7a" response="200" I0125 05:16:22.044029 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/cronjobs: (959.653µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.044671 4678 audit.go:125] 2017-01-25T05:16:22.044648587-05:00 AUDIT: id="83b11c2c-96ed-45f1-be5f-71eaf7c43da0" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/scheduledjobs" I0125 05:16:22.045316 4678 audit.go:45] 2017-01-25T05:16:22.045306235-05:00 AUDIT: id="83b11c2c-96ed-45f1-be5f-71eaf7c43da0" response="200" I0125 05:16:22.045364 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/scheduledjobs: (882.363µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.046033 4678 audit.go:125] 2017-01-25T05:16:22.046011066-05:00 AUDIT: id="c8e18b76-3a39-4ec3-9d13-3e0766dc84df" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/daemonsets" I0125 05:16:22.046826 4678 audit.go:45] 2017-01-25T05:16:22.046816641-05:00 AUDIT: id="c8e18b76-3a39-4ec3-9d13-3e0766dc84df" response="200" I0125 05:16:22.046879 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/daemonsets: (1.015771ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.047614 4678 audit.go:125] 2017-01-25T05:16:22.047591683-05:00 AUDIT: id="e91cbca1-a664-485d-946d-fb83eaab4b9b" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/deployments" I0125 05:16:22.048344 4678 
audit.go:45] 2017-01-25T05:16:22.048334684-05:00 AUDIT: id="e91cbca1-a664-485d-946d-fb83eaab4b9b" response="200" I0125 05:16:22.048424 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/deployments: (1.007611ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.049103 4678 audit.go:125] 2017-01-25T05:16:22.049080675-05:00 AUDIT: id="6d03f9ad-ec25-4759-b538-33a660569b0b" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers" I0125 05:16:22.049870 4678 audit.go:45] 2017-01-25T05:16:22.049861175-05:00 AUDIT: id="6d03f9ad-ec25-4759-b538-33a660569b0b" response="200" I0125 05:16:22.049931 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/horizontalpodautoscalers: (1.021135ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.050568 4678 audit.go:125] 2017-01-25T05:16:22.050545288-05:00 AUDIT: id="653f3634-edd9-434e-829f-5e021dd1e4d0" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/ingresses" I0125 05:16:22.051351 4678 audit.go:45] 2017-01-25T05:16:22.051341087-05:00 AUDIT: id="653f3634-edd9-434e-829f-5e021dd1e4d0" response="200" I0125 05:16:22.051407 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/ingresses: (1.013934ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.052102 4678 audit.go:125] 2017-01-25T05:16:22.052078103-05:00 AUDIT: id="05c31cdf-7fff-4e56-bebe-206218d4aff2" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs" I0125 05:16:22.052877 4678 audit.go:45] 2017-01-25T05:16:22.052867611-05:00 AUDIT: id="05c31cdf-7fff-4e56-bebe-206218d4aff2" response="200" I0125 05:16:22.052925 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/jobs: (1.033918ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.053586 4678 audit.go:125] 2017-01-25T05:16:22.05356168-05:00 AUDIT: id="19dd7490-ac1b-417a-956e-17952db9fcc3" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/networkpolicies" I0125 05:16:22.054343 4678 audit.go:45] 2017-01-25T05:16:22.054330272-05:00 AUDIT: id="19dd7490-ac1b-417a-956e-17952db9fcc3" response="200" I0125 05:16:22.054423 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/networkpolicies: (1.023473ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.055093 4678 audit.go:125] 2017-01-25T05:16:22.055067701-05:00 AUDIT: id="a0113603-08c8-4e2d-9c0f-d3151ff09401" ip="172.18.7.222" method="GET" user="system:admin" 
as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicasets" I0125 05:16:22.055896 4678 audit.go:45] 2017-01-25T05:16:22.05588594-05:00 AUDIT: id="a0113603-08c8-4e2d-9c0f-d3151ff09401" response="200" I0125 05:16:22.055954 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicasets: (1.066317ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.056622 4678 audit.go:125] 2017-01-25T05:16:22.056599718-05:00 AUDIT: id="4ff9075e-68b1-4d52-a895-6a3460d28993" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicationcontrollers" I0125 05:16:22.056734 4678 audit.go:45] 2017-01-25T05:16:22.05672791-05:00 AUDIT: id="4ff9075e-68b1-4d52-a895-6a3460d28993" response="404" I0125 05:16:22.056765 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicationcontrollers: (341.364µs) 404 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.057534 4678 audit.go:125] 2017-01-25T05:16:22.057512024-05:00 AUDIT: id="0942d251-8893-411e-ab6d-710fa7cd71ee" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/poddisruptionbudgets" I0125 05:16:22.058321 4678 audit.go:45] 2017-01-25T05:16:22.058307641-05:00 AUDIT: id="0942d251-8893-411e-ab6d-710fa7cd71ee" response="200" I0125 05:16:22.058380 4678 panics.go:76] GET /apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/poddisruptionbudgets: (1.053797ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.059266 4678 audit.go:125] 2017-01-25T05:16:22.059237579-05:00 AUDIT: id="6b3c3ad1-60ba-406c-a118-a808c79d11a2" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/configmaps" I0125 05:16:22.060140 4678 audit.go:45] 2017-01-25T05:16:22.060130935-05:00 AUDIT: id="6b3c3ad1-60ba-406c-a118-a808c79d11a2" response="200" I0125 05:16:22.060251 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/configmaps: (1.195456ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.060947 4678 audit.go:125] 2017-01-25T05:16:22.060925187-05:00 AUDIT: id="2caa9ca4-ca62-47ca-87d1-62ac7b827e66" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/endpoints" I0125 05:16:22.061788 4678 audit.go:45] 2017-01-25T05:16:22.061777577-05:00 AUDIT: id="2caa9ca4-ca62-47ca-87d1-62ac7b827e66" response="200" I0125 05:16:22.061847 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/endpoints: (1.092695ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.062533 4678 
audit.go:125] 2017-01-25T05:16:22.06249802-05:00 AUDIT: id="cfeb7d1d-ecda-4191-a6a5-29b5774001e5" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/events" I0125 05:16:22.063362 4678 audit.go:45] 2017-01-25T05:16:22.063352673-05:00 AUDIT: id="cfeb7d1d-ecda-4191-a6a5-29b5774001e5" response="200" I0125 05:16:22.063430 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/events: (1.110373ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.064123 4678 audit.go:125] 2017-01-25T05:16:22.064101707-05:00 AUDIT: id="3f5fd42f-4942-44b0-bd5a-524392bd08c9" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/limitranges" I0125 05:16:22.064930 4678 audit.go:45] 2017-01-25T05:16:22.064921233-05:00 AUDIT: id="3f5fd42f-4942-44b0-bd5a-524392bd08c9" response="200" I0125 05:16:22.064988 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/limitranges: (1.058877ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.065673 4678 audit.go:125] 2017-01-25T05:16:22.065647502-05:00 AUDIT: id="d8e225b1-bb82-4b9f-a664-a702fb7130ad" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/persistentvolumeclaims" I0125 05:16:22.066464 4678 audit.go:45] 2017-01-25T05:16:22.066454881-05:00 AUDIT: id="d8e225b1-bb82-4b9f-a664-a702fb7130ad" response="200" I0125 05:16:22.066521 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/persistentvolumeclaims: (1.042468ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.067153 4678 audit.go:125] 2017-01-25T05:16:22.067130964-05:00 AUDIT: id="56570ab0-8776-46c5-bed4-99e2de666b3e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods" I0125 05:16:22.067956 4678 audit.go:45] 2017-01-25T05:16:22.067945132-05:00 AUDIT: id="56570ab0-8776-46c5-bed4-99e2de666b3e" response="200" I0125 05:16:22.068027 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods: (1.056793ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.068735 4678 audit.go:125] 2017-01-25T05:16:22.068713102-05:00 AUDIT: id="1ddd1281-2172-4c08-9d35-5e78e059b8e4" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/podtemplates" I0125 05:16:22.069512 4678 audit.go:45] 2017-01-25T05:16:22.069502056-05:00 AUDIT: id="1ddd1281-2172-4c08-9d35-5e78e059b8e4" response="200" I0125 05:16:22.069565 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/podtemplates: (1.00804ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] 
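(Editor's aside; the lines below are not part of the captured output. Each audit record in the stream above is a pair: an audit.go:125 line carrying id, ip, method, user, namespace, and uri, and an audit.go:45 line carrying the response code for the same id, followed by the panics.go:76 httplog line with the request latency and status. A minimal Go sketch for pulling the key="value" fields out of one of these lines, so the request and response halves can be joined on id, might look like the following; the regex and the helper name parseAuditFields are illustrative only, not taken from the Origin code base.)

package main

import (
	"fmt"
	"regexp"
)

// auditField matches the key="value" pairs used by the AUDIT lines above.
var auditField = regexp.MustCompile(`(\w+)="([^"]*)"`)

// parseAuditFields returns the key/value fields of a single AUDIT entry.
func parseAuditFields(entry string) map[string]string {
	fields := map[string]string{}
	for _, m := range auditField.FindAllStringSubmatch(entry, -1) {
		fields[m[1]] = m[2]
	}
	return fields
}

func main() {
	// Entry copied verbatim from the log above.
	entry := `AUDIT: id="56570ab0-8776-46c5-bed4-99e2de666b3e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/pods"`
	f := parseAuditFields(entry)
	fmt.Println(f["id"], f["method"], f["uri"])
}

(End of aside.)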
I0125 05:16:22.070220 4678 audit.go:125] 2017-01-25T05:16:22.070179686-05:00 AUDIT: id="e35df9b9-cff6-470b-abde-0887a64fcc30" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicationcontrollers" I0125 05:16:22.070996 4678 audit.go:45] 2017-01-25T05:16:22.070986609-05:00 AUDIT: id="e35df9b9-cff6-470b-abde-0887a64fcc30" response="200" I0125 05:16:22.071048 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/replicationcontrollers: (1.034787ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.071716 4678 audit.go:125] 2017-01-25T05:16:22.071693741-05:00 AUDIT: id="3f9d8894-6a79-4c8a-a067-64aa35b1a874" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/resourcequotas" I0125 05:16:22.072533 4678 audit.go:45] 2017-01-25T05:16:22.072523158-05:00 AUDIT: id="3f9d8894-6a79-4c8a-a067-64aa35b1a874" response="200" I0125 05:16:22.072588 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/resourcequotas: (1.065488ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.073238 4678 audit.go:125] 2017-01-25T05:16:22.073206935-05:00 AUDIT: id="fd6c158d-fe6d-4488-a82c-be080bc47473" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets" I0125 05:16:22.074030 4678 audit.go:45] 2017-01-25T05:16:22.074020496-05:00 AUDIT: id="fd6c158d-fe6d-4488-a82c-be080bc47473" response="200" I0125 05:16:22.074073 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/secrets: (1.039921ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.074781 4678 audit.go:125] 2017-01-25T05:16:22.074757624-05:00 AUDIT: id="ebab0b33-bed9-4c87-9e0b-22f1248d64d4" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts" I0125 05:16:22.075607 4678 audit.go:45] 2017-01-25T05:16:22.075597717-05:00 AUDIT: id="ebab0b33-bed9-4c87-9e0b-22f1248d64d4" response="200" I0125 05:16:22.075675 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/serviceaccounts: (1.111262ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.076375 4678 audit.go:125] 2017-01-25T05:16:22.076351533-05:00 AUDIT: id="72c3cb96-d9a9-4507-89aa-f10b2750732e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-0-bwll6-pnjps" uri="/api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/services" I0125 05:16:22.077159 4678 audit.go:45] 2017-01-25T05:16:22.07714927-05:00 AUDIT: id="72c3cb96-d9a9-4507-89aa-f10b2750732e" response="200" I0125 05:16:22.077216 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-0-bwll6-pnjps/services: (1.046016ms) 200 [[extended.test/v1.5.2+43a9be4 
(linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.077905 4678 audit.go:125] 2017-01-25T05:16:22.077881871-05:00 AUDIT: id="727eec2f-a14a-4a82-b0d5-a759ca90afec" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:16:22.079736 4678 audit.go:45] 2017-01-25T05:16:22.079726853-05:00 AUDIT: id="727eec2f-a14a-4a82-b0d5-a759ca90afec" response="200" I0125 05:16:22.080141 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (2.414853ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.081398 4678 audit.go:125] 2017-01-25T05:16:22.081376168-05:00 AUDIT: id="cbf79e23-d9bd-4c66-a092-c4043d834363" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:22.082333 4678 audit.go:45] 2017-01-25T05:16:22.082323198-05:00 AUDIT: id="cbf79e23-d9bd-4c66-a092-c4043d834363" response="200" I0125 05:16:22.082558 4678 panics.go:76] GET /api/v1/nodes: (1.351606ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.083137 4678 audit.go:125] 2017-01-25T05:16:22.08311385-05:00 AUDIT: id="c4b5efce-8d57-4183-8eb7-c9c271e977f4" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/pods" I0125 05:16:22.084057 4678 audit.go:45] 2017-01-25T05:16:22.084046714-05:00 AUDIT: id="c4b5efce-8d57-4183-8eb7-c9c271e977f4" response="200" I0125 05:16:22.084311 4678 panics.go:76] GET /api/v1/pods: (1.355399ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.085027 4678 audit.go:125] 2017-01-25T05:16:22.085008583-05:00 AUDIT: id="2e240317-29ac-42ff-823a-f6f328752928" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:22.085877 4678 audit.go:45] 2017-01-25T05:16:22.085867003-05:00 AUDIT: id="2e240317-29ac-42ff-823a-f6f328752928" response="200" I0125 05:16:22.086084 4678 panics.go:76] GET /api/v1/nodes: (1.238074ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.086672 4678 audit.go:125] 2017-01-25T05:16:22.086653518-05:00 AUDIT: id="90f33aea-c40c-4d14-ab54-e31fd9407691" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222" I0125 05:16:22.087354 4678 audit.go:45] 2017-01-25T05:16:22.087344638-05:00 AUDIT: id="90f33aea-c40c-4d14-ab54-e31fd9407691" response="200" I0125 05:16:22.087566 4678 panics.go:76] GET /api/v1/nodes/172.18.7.222: (1.076305ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.088268 4678 audit.go:125] 2017-01-25T05:16:22.088243589-05:00 AUDIT: id="52ca71cc-2048-43d1-962f-3896bc3a9330" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="kube-system" uri="/api/v1/namespaces/kube-system/events?fieldSelector=involvedObject.kind%3DNode%2CinvolvedObject.name%3D172.18.7.222%2CinvolvedObject.namespace%3D%2Csource%3Dkubelet" I0125 05:16:22.089090 4678 audit.go:45] 2017-01-25T05:16:22.089080743-05:00 AUDIT: id="52ca71cc-2048-43d1-962f-3896bc3a9330" response="200" I0125 05:16:22.089140 4678 panics.go:76] GET 
/api/v1/namespaces/kube-system/events?fieldSelector=involvedObject.kind%3DNode%2CinvolvedObject.name%3D172.18.7.222%2CinvolvedObject.namespace%3D%2Csource%3Dkubelet: (1.085139ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.089751 4678 audit.go:125] 2017-01-25T05:16:22.08972966-05:00 AUDIT: id="bffbac54-1e9d-4afb-8d12-bb20ae1fe60f" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/version" I0125 05:16:22.089854 4678 audit.go:45] 2017-01-25T05:16:22.08984689-05:00 AUDIT: id="bffbac54-1e9d-4afb-8d12-bb20ae1fe60f" response="200" I0125 05:16:22.089870 4678 panics.go:76] GET /version: (298.42µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.090514 4678 audit.go:125] 2017-01-25T05:16:22.090493546-05:00 AUDIT: id="d8493c68-b56b-4a90-aea6-35bce72aa28a" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222:10250/proxy/pods" I0125 05:16:22.091711 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"get", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/pods"} I0125 05:16:22.092086 4678 server.go:744] GET /pods: (533.584µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:52858] I0125 05:16:22.092141 4678 audit.go:45] 2017-01-25T05:16:22.092128744-05:00 AUDIT: id="d8493c68-b56b-4a90-aea6-35bce72aa28a" response="200" I0125 05:16:22.092367 4678 panics.go:76] GET /api/v1/nodes/172.18.7.222:10250/proxy/pods: (2.037336ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.093430 4678 audit.go:125] 2017-01-25T05:16:22.093412229-05:00 AUDIT: id="3e057dd4-99f5-4c0b-821b-e13a4650091a" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:22.094276 4678 audit.go:45] 2017-01-25T05:16:22.094266719-05:00 AUDIT: id="3e057dd4-99f5-4c0b-821b-e13a4650091a" response="200" I0125 05:16:22.094489 4678 panics.go:76] GET /api/v1/nodes: (1.217086ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.095151 4678 audit.go:125] 2017-01-25T05:16:22.095132401-05:00 AUDIT: id="07e8f1d1-51d1-4eca-993b-bd46470bc87c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222" I0125 05:16:22.095970 4678 audit.go:45] 2017-01-25T05:16:22.095960908-05:00 AUDIT: id="07e8f1d1-51d1-4eca-993b-bd46470bc87c" response="200" I0125 05:16:22.096158 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222: (1.1837ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.096756 4678 audit.go:125] 2017-01-25T05:16:22.096735814-05:00 AUDIT: id="64ae7d33-6d73-41e0-946b-190c4654f4c2" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/proxy/nodes/172.18.7.222:10250/metrics" I0125 05:16:22.097670 4678 proxy.go:187] [4d568b05e7eae181] Beginning proxy /api/v1/proxy/nodes/172.18.7.222:10250/metrics... 
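(Editor's aside; not part of the captured output. The kube-system events request above filters on a URL-encoded fieldSelector; decoded, it reads involvedObject.kind=Node,involvedObject.name=172.18.7.222,involvedObject.namespace=,source=kubelet, i.e. only kubelet-sourced events about node 172.18.7.222. A small stand-alone Go sketch, standard library only, that recovers the decoded selector from the logged URI:)

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// URI copied verbatim from the log above.
	raw := "/api/v1/namespaces/kube-system/events?fieldSelector=involvedObject.kind%3DNode%2CinvolvedObject.name%3D172.18.7.222%2CinvolvedObject.namespace%3D%2Csource%3Dkubelet"
	u, err := url.Parse(raw)
	if err != nil {
		panic(err)
	}
	// Prints: involvedObject.kind=Node,involvedObject.name=172.18.7.222,involvedObject.namespace=,source=kubelet
	fmt.Println(u.Query().Get("fieldSelector"))
}

(End of aside.)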
I0125 05:16:22.097924 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"get", APIVersion:"v1", APIGroup:"", Resource:"nodes/metrics", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/metrics"} I0125 05:16:22.196539 4678 audit.go:45] 2017-01-25T05:16:22.19651305-05:00 AUDIT: id="64ae7d33-6d73-41e0-946b-190c4654f4c2" response="200" I0125 05:16:22.196570 4678 server.go:744] GET /metrics: (98.764813ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:52858] I0125 05:16:22.197014 4678 proxy.go:189] [4d568b05e7eae181] Proxy /api/v1/proxy/nodes/172.18.7.222:10250/metrics finished 99.342172ms. I0125 05:16:22.197070 4678 panics.go:76] GET /api/v1/proxy/nodes/172.18.7.222:10250/metrics: (100.493222ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.209740 4678 namespace_controller.go:197] Namespace has been deleted extended-test-postgresql-replication-0-bwll6-pnjps I0125 05:16:22.209755 4678 namespace_controller.go:198] Finished syncing namespace "extended-test-postgresql-replication-0-bwll6-pnjps" (382ns) I0125 05:16:22.246441 4678 audit.go:125] 2017-01-25T05:16:22.246404645-05:00 AUDIT: id="1194e18f-ad00-4774-aad3-4788e9e38bb5" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="kube-system" uri="/api/v1/namespaces/kube-system/pods?labelSelector=name%3De2e-image-puller" I0125 05:16:22.247644 4678 audit.go:45] 2017-01-25T05:16:22.247631601-05:00 AUDIT: id="1194e18f-ad00-4774-aad3-4788e9e38bb5" response="200" I0125 05:16:22.247711 4678 panics.go:76] GET /api/v1/namespaces/kube-system/pods?labelSelector=name%3De2e-image-puller: (1.526478ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.248414 4678 audit.go:125] 2017-01-25T05:16:22.24839032-05:00 AUDIT: id="1b8fbd59-0fcc-42c0-9eda-a55e70271847" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:22.249359 4678 audit.go:45] 2017-01-25T05:16:22.249348442-05:00 AUDIT: id="1b8fbd59-0fcc-42c0-9eda-a55e70271847" response="200" I0125 05:16:22.249578 4678 panics.go:76] GET /api/v1/nodes: (1.357985ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.250242 4678 audit.go:125] 2017-01-25T05:16:22.250207845-05:00 AUDIT: id="33042ffc-f237-47ad-8a94-7457958031e4" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:16:22.251869 4678 audit.go:45] 2017-01-25T05:16:22.251856675-05:00 AUDIT: id="33042ffc-f237-47ad-8a94-7457958031e4" response="200" I0125 05:16:22.251926 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.898902ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.252716 4678 audit.go:125] 2017-01-25T05:16:22.25268345-05:00 AUDIT: id="d4b81855-a602-408a-8e9e-052bc1c0e7e1" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/buildconfigs" I0125 05:16:22.252741 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (2.411µs) I0125 05:16:22.253556 4678 audit.go:125] 2017-01-25T05:16:22.253522552-05:00 AUDIT: id="2ed64aa1-4748-4c79-9569-b7cf1f5df34f" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:16:22.253902 4678 audit.go:45] 2017-01-25T05:16:22.253889516-05:00 AUDIT: id="d4b81855-a602-408a-8e9e-052bc1c0e7e1" response="200" I0125 05:16:22.253956 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/buildconfigs: (1.49788ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.254469 4678 audit.go:45] 2017-01-25T05:16:22.254455542-05:00 AUDIT: id="2ed64aa1-4748-4c79-9569-b7cf1f5df34f" response="200" I0125 05:16:22.254520 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.224571ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:22.254790 4678 audit.go:125] 2017-01-25T05:16:22.254759249-05:00 AUDIT: id="deb7b86c-7776-4373-b192-f1e3f0182b26" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/builds" I0125 05:16:22.255580 4678 audit.go:45] 2017-01-25T05:16:22.2555705-05:00 AUDIT: id="deb7b86c-7776-4373-b192-f1e3f0182b26" response="200" I0125 05:16:22.255621 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/builds: (1.101649ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.256179 4678 audit.go:125] 2017-01-25T05:16:22.256157368-05:00 AUDIT: id="ae871ba8-ef41-4adb-9bf0-42890d4a7c68" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs" I0125 05:16:22.256966 4678 audit.go:45] 2017-01-25T05:16:22.256956947-05:00 AUDIT: id="ae871ba8-ef41-4adb-9bf0-42890d4a7c68" response="200" I0125 05:16:22.257027 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deploymentconfigs: (1.035738ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.257628 4678 audit.go:125] 2017-01-25T05:16:22.25760647-05:00 AUDIT: id="a5d1610c-3fff-4281-8345-97273d2b4327" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/egressnetworkpolicies" I0125 05:16:22.258461 4678 audit.go:45] 2017-01-25T05:16:22.258451302-05:00 AUDIT: id="a5d1610c-3fff-4281-8345-97273d2b4327" response="200" I0125 05:16:22.258512 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/egressnetworkpolicies: (1.053312ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.259098 4678 audit.go:125] 2017-01-25T05:16:22.259075644-05:00 AUDIT: 
id="0045024a-082b-4a77-8843-4e29d3b128c5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/imagestreams" I0125 05:16:22.259948 4678 audit.go:45] 2017-01-25T05:16:22.259938058-05:00 AUDIT: id="0045024a-082b-4a77-8843-4e29d3b128c5" response="200" I0125 05:16:22.259990 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/imagestreams: (1.080372ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.260586 4678 audit.go:125] 2017-01-25T05:16:22.260555293-05:00 AUDIT: id="79e6ee3c-2a58-4a19-b715-c62828fc4062" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/policies" I0125 05:16:22.261376 4678 audit.go:45] 2017-01-25T05:16:22.261363385-05:00 AUDIT: id="79e6ee3c-2a58-4a19-b715-c62828fc4062" response="200" I0125 05:16:22.261437 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/policies: (1.038781ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.262078 4678 audit.go:125] 2017-01-25T05:16:22.262055907-05:00 AUDIT: id="13665e80-da5c-4677-85fa-5e2fa34f582a" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/policybindings" I0125 05:16:22.263001 4678 audit.go:45] 2017-01-25T05:16:22.262981672-05:00 AUDIT: id="13665e80-da5c-4677-85fa-5e2fa34f582a" response="200" I0125 05:16:22.263074 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/policybindings: (1.177405ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.263649 4678 audit.go:125] 2017-01-25T05:16:22.263620456-05:00 AUDIT: id="4dc373ef-8d57-4de4-be45-b00112eab58b" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/policybindings/:default" I0125 05:16:22.266214 4678 audit.go:45] 2017-01-25T05:16:22.266186229-05:00 AUDIT: id="4dc373ef-8d57-4de4-be45-b00112eab58b" response="200" I0125 05:16:22.266268 4678 panics.go:76] DELETE /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/policybindings/:default: (2.802083ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.266870 4678 audit.go:125] 2017-01-25T05:16:22.26684701-05:00 AUDIT: id="070b9ae5-5d69-4d16-b027-a8d568f40186" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/rolebindings" I0125 05:16:22.267688 4678 audit.go:45] 2017-01-25T05:16:22.267678244-05:00 AUDIT: id="070b9ae5-5d69-4d16-b027-a8d568f40186" response="200" I0125 05:16:22.267737 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/rolebindings: (1.065124ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.268291 
4678 audit.go:125] 2017-01-25T05:16:22.268269957-05:00 AUDIT: id="885c2f86-c67f-41a3-99a1-ef49923970c5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/roles" I0125 05:16:22.268970 4678 audit.go:45] 2017-01-25T05:16:22.268955479-05:00 AUDIT: id="885c2f86-c67f-41a3-99a1-ef49923970c5" response="200" I0125 05:16:22.269016 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/roles: (915.413µs) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.269548 4678 audit.go:125] 2017-01-25T05:16:22.269526556-05:00 AUDIT: id="969531a2-180d-4698-9aee-40e19596a1f4" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/routes" I0125 05:16:22.270296 4678 audit.go:45] 2017-01-25T05:16:22.270286058-05:00 AUDIT: id="969531a2-180d-4698-9aee-40e19596a1f4" response="200" I0125 05:16:22.270347 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/routes: (981.299µs) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.270827 4678 audit.go:125] 2017-01-25T05:16:22.270805331-05:00 AUDIT: id="ad25c5da-8e61-48b3-9b42-d04d62ba148b" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/templates" I0125 05:16:22.271557 4678 audit.go:45] 2017-01-25T05:16:22.271545359-05:00 AUDIT: id="ad25c5da-8e61-48b3-9b42-d04d62ba148b" response="200" I0125 05:16:22.271605 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/templates: (958.194µs) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:22.272180 4678 audit.go:125] 2017-01-25T05:16:22.272159331-05:00 AUDIT: id="f677ae53-3311-4057-9b3c-cd4f31738d77" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/finalize" I0125 05:16:22.273620 4678 audit.go:45] 2017-01-25T05:16:22.273600578-05:00 AUDIT: id="f677ae53-3311-4057-9b3c-cd4f31738d77" response="200" I0125 05:16:22.273678 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/finalize: (1.673574ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:22.273933 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (2.25µs) I0125 05:16:22.587571 4678 helpers.go:101] Unable to get network stats from pid 10842: couldn't read network stats: failure opening /proc/10842/net/dev: open /proc/10842/net/dev: no such file or directory I0125 05:16:22.967195 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:16:22.967224 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:22.967924 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:16:22.967941 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:22.968459 4678 http.go:82] 
Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:16:22 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc438776600 0 [] true false map[] 0xc43544cf00 } I0125 05:16:22.968514 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:16:22.968788 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:16:22 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc438776720 0 [] true false map[] 0xc42e3acff0 } I0125 05:16:22.968820 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:16:23.453903 4678 audit.go:125] 2017-01-25T05:16:23.453870465-05:00 AUDIT: id="d7b349fa-17c3-410d-96b4-5cc1ad3d1e25" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:16:23.454334 4678 audit.go:45] 2017-01-25T05:16:23.454325257-05:00 AUDIT: id="d7b349fa-17c3-410d-96b4-5cc1ad3d1e25" response="200" I0125 05:16:23.454674 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (1.044209ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:23.454924 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:16:23.684606 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:24.643695 4678 panics.go:76] GET /oapi/v1/watch/buildconfigs?resourceVersion=10587&timeoutSeconds=357: (5m57.001131081s) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:24.643933 4678 reflector.go:392] github.com/openshift/origin/pkg/controller/shared/shared_informer.go:89: Watch close - *api.BuildConfig total 1 items received I0125 05:16:24.644572 4678 audit.go:125] 2017-01-25T05:16:24.644539341-05:00 AUDIT: id="35c88cdb-d4ae-4d53-be10-b590a117df8f" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/oapi/v1/watch/buildconfigs?resourceVersion=10733&timeoutSeconds=469" I0125 05:16:24.645042 4678 audit.go:45] 2017-01-25T05:16:24.645027234-05:00 AUDIT: id="35c88cdb-d4ae-4d53-be10-b590a117df8f" response="200" I0125 05:16:25.107189 4678 helpers.go:101] Unable to get network stats from pid 12824: couldn't read network stats: failure opening /proc/12824/net/dev: open /proc/12824/net/dev: no such file or directory I0125 05:16:25.386048 4678 audit.go:125] 2017-01-25T05:16:25.386014903-05:00 AUDIT: id="a26dfb1e-5a24-4f1e-8bdf-3afb8a4a28f6" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:16:25.387062 4678 audit.go:45] 2017-01-25T05:16:25.387047712-05:00 AUDIT: id="a26dfb1e-5a24-4f1e-8bdf-3afb8a4a28f6" response="200" I0125 05:16:25.387126 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.326138ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:25.684593 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:26.684596 4678 kubelet.go:1835] SyncLoop (SYNC): 1 pods; router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094) I0125 05:16:26.684649 4678 kubelet_pods.go:1029] Generating status for 
"router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:26.684812 4678 status_manager.go:312] Ignoring same status for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)", status: {Phase:Running Conditions:[{Type:Initialized Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:09 -0500 EST Reason: Message:} {Type:Ready Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:29 -0500 EST Reason: Message:} {Type:PodScheduled Status:True LastProbeTime:0001-01-01 00:00:00 +0000 UTC LastTransitionTime:2017-01-25 03:41:09 -0500 EST Reason: Message:}] Message: Reason: HostIP:172.18.7.222 PodIP:172.18.7.222 StartTime:2017-01-25 03:41:09 -0500 EST InitContainerStatuses:[] ContainerStatuses:[{Name:router State:{Waiting: Running:0xc4322b7aa0 Terminated:} LastTerminationState:{Waiting: Running: Terminated:} Ready:true RestartCount:0 Image:openshift/origin-haproxy-router:86a9783 ImageID:docker://sha256:0e944dc1f6ca904b8892fd8e5da5ec5cf13c0f673b44380cc81c1fdbc53b379e ContainerID:docker://38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018}]} I0125 05:16:26.684908 4678 volume_manager.go:336] Waiting for volumes to attach and mount for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:26.724027 4678 secret.go:179] Setting up volume server-certificate for pod 04c98b55-e2da-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/server-certificate I0125 05:16:26.724140 4678 secret.go:179] Setting up volume router-token-s79l8 for pod 04c98b55-e2da-11e6-a4b0-0e6a5cbf0094 at /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/router-token-s79l8 I0125 05:16:26.724878 4678 audit.go:125] 2017-01-25T05:16:26.724843214-05:00 AUDIT: id="45212e30-0e8d-4927-b18a-9cbb16fd610f" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-token-s79l8" I0125 05:16:26.725046 4678 audit.go:125] 2017-01-25T05:16:26.725021434-05:00 AUDIT: id="af391388-92dc-40ff-ad5c-1d56340a5279" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-certs" I0125 05:16:26.726252 4678 audit.go:45] 2017-01-25T05:16:26.726237159-05:00 AUDIT: id="45212e30-0e8d-4927-b18a-9cbb16fd610f" response="200" I0125 05:16:26.726252 4678 audit.go:45] 2017-01-25T05:16:26.726240859-05:00 AUDIT: id="af391388-92dc-40ff-ad5c-1d56340a5279" response="200" I0125 05:16:26.726550 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-token-s79l8: (1.96556ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:26.726587 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-certs: (2.025198ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:26.726767 4678 secret.go:206] Received secret default/router-certs containing (2) pieces of data, 6633 total bytes I0125 05:16:26.726916 4678 atomic_writer.go:142] pod default/router-2-tnqzg volume server-certificate: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/server-certificate I0125 05:16:26.726928 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume 
"kubernetes.io/secret/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094-server-certificate" (spec.Name: "server-certificate") pod "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094" (UID: "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094"). I0125 05:16:26.727008 4678 secret.go:206] Received secret default/router-token-s79l8 containing (4) pieces of data, 4105 total bytes I0125 05:16:26.727136 4678 atomic_writer.go:142] pod default/router-2-tnqzg volume router-token-s79l8: no update required for target directory /mnt/openshift-xfs-vol-dir/pods/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094/volumes/kubernetes.io~secret/router-token-s79l8 I0125 05:16:26.727146 4678 operation_executor.go:917] MountVolume.SetUp succeeded for volume "kubernetes.io/secret/04c98b55-e2da-11e6-a4b0-0e6a5cbf0094-router-token-s79l8" (spec.Name: "router-token-s79l8") pod "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094" (UID: "04c98b55-e2da-11e6-a4b0-0e6a5cbf0094"). I0125 05:16:26.985139 4678 volume_manager.go:365] All volumes are attached and mounted for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:26.985792 4678 audit.go:125] 2017-01-25T05:16:26.98575476-05:00 AUDIT: id="5c77e805-92f7-4c32-8fbf-3a6f0fa8e942" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/secrets/router-dockercfg-g5x9s" I0125 05:16:26.986897 4678 audit.go:45] 2017-01-25T05:16:26.986886184-05:00 AUDIT: id="5c77e805-92f7-4c32-8fbf-3a6f0fa8e942" response="200" I0125 05:16:26.987046 4678 panics.go:76] GET /api/v1/namespaces/default/secrets/router-dockercfg-g5x9s: (1.505086ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:26.987240 4678 docker_manager.go:1938] Found pod infra container for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:26.989302 4678 docker_manager.go:1951] Pod infra container looks good, keep it "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" I0125 05:16:26.989316 4678 docker_manager.go:1999] pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)" container "router" exists as 38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018 I0125 05:16:26.989421 4678 docker_manager.go:2086] Got container changes for pod "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094)": {StartInfraContainer:false InfraChanged:false InfraContainerId:188d4e0914dd28bdf7d2fcdd075d10d5db9c14ede72c6c0417fe6eaf68450985 InitFailed:false InitContainersToKeep:map[] ContainersToStart:map[] ContainersToKeep:map[188d4e0914dd28bdf7d2fcdd075d10d5db9c14ede72c6c0417fe6eaf68450985:-1 38411a172568e72bef70bbbe7212e946ddb4a586e3b9870b06d7295b5e7bf018:0]} I0125 05:16:27.255515 4678 audit.go:125] 2017-01-25T05:16:27.255469028-05:00 AUDIT: id="b8c54ba7-8483-4c7f-911b-0fbadf1e853c" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:16:27.255758 4678 audit.go:125] 2017-01-25T05:16:27.255724198-05:00 AUDIT: id="a87e6c69-5b18-4f1c-a17b-70217980947a" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:16:27.256667 4678 audit.go:45] 2017-01-25T05:16:27.25665718-05:00 AUDIT: id="a87e6c69-5b18-4f1c-a17b-70217980947a" response="200" I0125 
05:16:27.256744 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.236551ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:27.256793 4678 audit.go:45] 2017-01-25T05:16:27.256781188-05:00 AUDIT: id="b8c54ba7-8483-4c7f-911b-0fbadf1e853c" response="200" I0125 05:16:27.256839 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (3.32007ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.257127 4678 namespace_controller_utils.go:352] namespace controller - deleteAllContent - namespace: extended-test-postgresql-replication-1-34bbd-xd4g8, gvrs: [{apps v1beta1 statefulsets} {autoscaling v1 horizontalpodautoscalers} {batch v1 jobs} {batch v2alpha1 cronjobs} {batch v2alpha1 scheduledjobs} {extensions v1beta1 daemonsets} {extensions v1beta1 deployments} {extensions v1beta1 horizontalpodautoscalers} {extensions v1beta1 ingresses} {extensions v1beta1 jobs} {extensions v1beta1 networkpolicies} {extensions v1beta1 replicasets} {extensions v1beta1 replicationcontrollers} {policy v1beta1 poddisruptionbudgets} { v1 bindings} { v1 configmaps} { v1 endpoints} { v1 events} { v1 limitranges} { v1 persistentvolumeclaims} { v1 serviceaccounts} { v1 podtemplates} { v1 replicationcontrollers} { v1 resourcequotas} { v1 secrets} { v1 services} { v1 pods}] I0125 05:16:27.259088 4678 audit.go:125] 2017-01-25T05:16:27.259065034-05:00 AUDIT: id="49dca8a4-705e-49ea-b043-849b6deffbb6" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/statefulsets" I0125 05:16:27.260018 4678 audit.go:45] 2017-01-25T05:16:27.260008636-05:00 AUDIT: id="49dca8a4-705e-49ea-b043-849b6deffbb6" response="200" I0125 05:16:27.260097 4678 panics.go:76] DELETE /apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/statefulsets: (2.473953ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.262074 4678 audit.go:125] 2017-01-25T05:16:27.262040029-05:00 AUDIT: id="58da1948-99c7-43c4-b7b4-dd0b7770c339" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/statefulsets" I0125 05:16:27.262863 4678 audit.go:45] 2017-01-25T05:16:27.262853436-05:00 AUDIT: id="58da1948-99c7-43c4-b7b4-dd0b7770c339" response="200" I0125 05:16:27.262916 4678 panics.go:76] GET /apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/statefulsets: (2.420137ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.264902 4678 audit.go:125] 2017-01-25T05:16:27.264878577-05:00 AUDIT: id="844ad75c-8411-4ddd-9224-cc45b47b8773" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers" I0125 05:16:27.265765 4678 audit.go:45] 2017-01-25T05:16:27.265755294-05:00 AUDIT: id="844ad75c-8411-4ddd-9224-cc45b47b8773" response="200" I0125 05:16:27.265817 4678 panics.go:76] DELETE /apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers: (2.324145ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.267781 4678 audit.go:125] 2017-01-25T05:16:27.267758439-05:00 AUDIT: id="23ff4027-787e-4949-abb1-ccf2fab616fe" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers" I0125 05:16:27.268449 4678 audit.go:45] 2017-01-25T05:16:27.268439492-05:00 AUDIT: id="23ff4027-787e-4949-abb1-ccf2fab616fe" response="200" I0125 05:16:27.268510 4678 panics.go:76] GET /apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers: (2.302362ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.270442 4678 audit.go:125] 2017-01-25T05:16:27.27041625-05:00 AUDIT: id="34d4a04f-ef93-4e3c-ba29-df9e820fa575" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/batch/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs" I0125 05:16:27.271275 4678 audit.go:45] 2017-01-25T05:16:27.271265167-05:00 AUDIT: id="34d4a04f-ef93-4e3c-ba29-df9e820fa575" response="200" I0125 05:16:27.271327 4678 panics.go:76] DELETE /apis/batch/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs: (2.228408ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.273112 4678 audit.go:125] 2017-01-25T05:16:27.273089632-05:00 AUDIT: id="d3327fb2-b775-4546-ada7-921c3cce72f0" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/batch/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs" I0125 05:16:27.273826 4678 audit.go:45] 2017-01-25T05:16:27.273816758-05:00 AUDIT: id="d3327fb2-b775-4546-ada7-921c3cce72f0" response="200" I0125 05:16:27.273874 4678 panics.go:76] GET /apis/batch/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs: (2.234625ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.275927 4678 audit.go:125] 2017-01-25T05:16:27.275904583-05:00 AUDIT: id="17bf819b-01cc-486c-92b3-9a51654751cc" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/cronjobs" I0125 05:16:27.276780 4678 audit.go:45] 
2017-01-25T05:16:27.276770648-05:00 AUDIT: id="17bf819b-01cc-486c-92b3-9a51654751cc" response="200" I0125 05:16:27.276832 4678 panics.go:76] DELETE /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/cronjobs: (2.388396ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.278473 4678 audit.go:125] 2017-01-25T05:16:27.278441153-05:00 AUDIT: id="4dad91b2-fe2a-4b10-97bb-6531754ce951" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/cronjobs" I0125 05:16:27.279129 4678 audit.go:45] 2017-01-25T05:16:27.279119404-05:00 AUDIT: id="4dad91b2-fe2a-4b10-97bb-6531754ce951" response="200" I0125 05:16:27.279184 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/cronjobs: (2.028153ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.281071 4678 audit.go:125] 2017-01-25T05:16:27.281045761-05:00 AUDIT: id="c57a0320-38f0-4843-bf9f-7b61247b07cf" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/scheduledjobs" I0125 05:16:27.281895 4678 audit.go:45] 2017-01-25T05:16:27.281885701-05:00 AUDIT: id="c57a0320-38f0-4843-bf9f-7b61247b07cf" response="200" I0125 05:16:27.281950 4678 panics.go:76] DELETE /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/scheduledjobs: (2.260096ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.283839 4678 audit.go:125] 2017-01-25T05:16:27.283812473-05:00 AUDIT: id="02c94b71-aa9d-4763-91bc-5c347ab5f862" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/scheduledjobs" I0125 05:16:27.284526 4678 audit.go:45] 2017-01-25T05:16:27.284514231-05:00 AUDIT: id="02c94b71-aa9d-4763-91bc-5c347ab5f862" response="200" I0125 05:16:27.284583 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/scheduledjobs: (2.232318ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.286628 4678 audit.go:125] 2017-01-25T05:16:27.286600888-05:00 AUDIT: id="1b253a7c-62ef-4bda-9c09-dac8c173d1c9" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/daemonsets" I0125 05:16:27.287506 4678 audit.go:45] 2017-01-25T05:16:27.287496268-05:00 AUDIT: id="1b253a7c-62ef-4bda-9c09-dac8c173d1c9" response="200" I0125 05:16:27.287565 4678 panics.go:76] DELETE 
/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/daemonsets: (2.417328ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.289308 4678 audit.go:125] 2017-01-25T05:16:27.28928522-05:00 AUDIT: id="776148ac-2366-40ed-9c3c-3edd207b65bf" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/daemonsets" I0125 05:16:27.289915 4678 audit.go:45] 2017-01-25T05:16:27.28990616-05:00 AUDIT: id="776148ac-2366-40ed-9c3c-3edd207b65bf" response="200" I0125 05:16:27.289962 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/daemonsets: (2.094153ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.291958 4678 audit.go:125] 2017-01-25T05:16:27.291934222-05:00 AUDIT: id="14d328b3-cd92-41b7-a0f0-36de099c83cc" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deployments" I0125 05:16:27.292859 4678 audit.go:45] 2017-01-25T05:16:27.2928499-05:00 AUDIT: id="14d328b3-cd92-41b7-a0f0-36de099c83cc" response="200" I0125 05:16:27.292915 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deployments: (2.423494ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.294814 4678 audit.go:125] 2017-01-25T05:16:27.294768044-05:00 AUDIT: id="0ca6c252-eb71-4319-a2eb-4aaecc6b1d05" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deployments" I0125 05:16:27.295496 4678 audit.go:45] 2017-01-25T05:16:27.295486749-05:00 AUDIT: id="0ca6c252-eb71-4319-a2eb-4aaecc6b1d05" response="200" I0125 05:16:27.295544 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deployments: (2.239351ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.297572 4678 audit.go:125] 2017-01-25T05:16:27.29753896-05:00 AUDIT: id="e1e4ea09-8fa3-43b6-9e8b-b5b909b7d6db" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers" I0125 05:16:27.298463 4678 audit.go:45] 2017-01-25T05:16:27.2984529-05:00 AUDIT: id="e1e4ea09-8fa3-43b6-9e8b-b5b909b7d6db" response="200" I0125 05:16:27.298513 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers: (2.445986ms) 200 
[[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.300470 4678 audit.go:125] 2017-01-25T05:16:27.300444359-05:00 AUDIT: id="02a66a77-d40c-4b70-a80b-ff5f310809a1" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers" I0125 05:16:27.301153 4678 audit.go:45] 2017-01-25T05:16:27.301143976-05:00 AUDIT: id="02a66a77-d40c-4b70-a80b-ff5f310809a1" response="200" I0125 05:16:27.301217 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers: (2.279958ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.303106 4678 audit.go:125] 2017-01-25T05:16:27.303080555-05:00 AUDIT: id="1c418103-6856-4ac9-ba4a-375591828bee" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/ingresses" I0125 05:16:27.303978 4678 audit.go:45] 2017-01-25T05:16:27.303968085-05:00 AUDIT: id="1c418103-6856-4ac9-ba4a-375591828bee" response="200" I0125 05:16:27.304028 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/ingresses: (2.250869ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.305800 4678 audit.go:125] 2017-01-25T05:16:27.305776261-05:00 AUDIT: id="2e56c38c-6519-4e0f-97fa-edb52107a0ae" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/ingresses" I0125 05:16:27.306531 4678 audit.go:45] 2017-01-25T05:16:27.30652185-05:00 AUDIT: id="2e56c38c-6519-4e0f-97fa-edb52107a0ae" response="200" I0125 05:16:27.306579 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/ingresses: (2.202266ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.308628 4678 audit.go:125] 2017-01-25T05:16:27.308605227-05:00 AUDIT: id="c0b2a527-0d5f-424e-b8da-d025102a9511" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs" I0125 05:16:27.309489 4678 audit.go:45] 2017-01-25T05:16:27.309479268-05:00 AUDIT: id="c0b2a527-0d5f-424e-b8da-d025102a9511" response="200" I0125 05:16:27.309548 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs: (2.442629ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 
172.18.7.222:50846] I0125 05:16:27.311293 4678 audit.go:125] 2017-01-25T05:16:27.311270277-05:00 AUDIT: id="9324c6e3-35d3-4479-8733-1f3d2171483f" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs" I0125 05:16:27.311974 4678 audit.go:45] 2017-01-25T05:16:27.311964981-05:00 AUDIT: id="9324c6e3-35d3-4479-8733-1f3d2171483f" response="200" I0125 05:16:27.312024 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs: (2.103893ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.314151 4678 audit.go:125] 2017-01-25T05:16:27.314127777-05:00 AUDIT: id="d0e491c2-05d0-410b-9e1f-4675300c5272" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/networkpolicies" I0125 05:16:27.315019 4678 audit.go:45] 2017-01-25T05:16:27.315009859-05:00 AUDIT: id="d0e491c2-05d0-410b-9e1f-4675300c5272" response="200" I0125 05:16:27.315073 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/networkpolicies: (2.477939ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.317002 4678 audit.go:125] 2017-01-25T05:16:27.316975913-05:00 AUDIT: id="dabe2776-081a-4776-b1d7-758b21398d0e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/networkpolicies" I0125 05:16:27.317669 4678 audit.go:45] 2017-01-25T05:16:27.317658015-05:00 AUDIT: id="dabe2776-081a-4776-b1d7-758b21398d0e" response="200" I0125 05:16:27.317726 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/networkpolicies: (2.235498ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.319561 4678 audit.go:125] 2017-01-25T05:16:27.319537111-05:00 AUDIT: id="31739670-12f3-451f-89f4-3855cfcf6503" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicasets" I0125 05:16:27.320389 4678 audit.go:45] 2017-01-25T05:16:27.320375134-05:00 AUDIT: id="31739670-12f3-451f-89f4-3855cfcf6503" response="200" I0125 05:16:27.320445 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicasets: (2.173462ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.322268 4678 audit.go:125] 2017-01-25T05:16:27.322245246-05:00 AUDIT: 
id="7a0305bf-d011-4daf-b167-c170bdf8c2e4" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicasets" I0125 05:16:27.322954 4678 audit.go:45] 2017-01-25T05:16:27.322944786-05:00 AUDIT: id="7a0305bf-d011-4daf-b167-c170bdf8c2e4" response="200" I0125 05:16:27.323012 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicasets: (2.245908ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.325138 4678 audit.go:125] 2017-01-25T05:16:27.325102804-05:00 AUDIT: id="3e097cbc-7746-4f2e-a66b-36e1e4726f0c" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/poddisruptionbudgets" I0125 05:16:27.326006 4678 audit.go:45] 2017-01-25T05:16:27.325996692-05:00 AUDIT: id="3e097cbc-7746-4f2e-a66b-36e1e4726f0c" response="200" I0125 05:16:27.326060 4678 panics.go:76] DELETE /apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/poddisruptionbudgets: (2.456326ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.327719 4678 audit.go:125] 2017-01-25T05:16:27.327690946-05:00 AUDIT: id="3117696b-9068-4f24-a8cf-fdc451d45f35" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/poddisruptionbudgets" I0125 05:16:27.328345 4678 audit.go:45] 2017-01-25T05:16:27.328335104-05:00 AUDIT: id="3117696b-9068-4f24-a8cf-fdc451d45f35" response="200" I0125 05:16:27.328399 4678 panics.go:76] GET /apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/poddisruptionbudgets: (1.984893ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.330128 4678 audit.go:125] 2017-01-25T05:16:27.330106052-05:00 AUDIT: id="058b4cb3-9381-4479-893e-58f15ce13b9b" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/configmaps" I0125 05:16:27.330971 4678 audit.go:45] 2017-01-25T05:16:27.330961481-05:00 AUDIT: id="058b4cb3-9381-4479-893e-58f15ce13b9b" response="200" I0125 05:16:27.331021 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/configmaps: (2.076879ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.332793 4678 audit.go:125] 2017-01-25T05:16:27.332767736-05:00 AUDIT: id="71a2ae41-faad-413a-afe4-fbb8c0226877" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" 
asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/configmaps" I0125 05:16:27.333590 4678 audit.go:45] 2017-01-25T05:16:27.333581097-05:00 AUDIT: id="71a2ae41-faad-413a-afe4-fbb8c0226877" response="200" I0125 05:16:27.333648 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/configmaps: (2.26617ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.335655 4678 audit.go:125] 2017-01-25T05:16:27.335632863-05:00 AUDIT: id="9673e15b-4ec6-419c-ae28-17e64907ddf8" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints" I0125 05:16:27.336622 4678 audit.go:45] 2017-01-25T05:16:27.336608522-05:00 AUDIT: id="9673e15b-4ec6-419c-ae28-17e64907ddf8" response="200" I0125 05:16:27.336678 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints: (2.487819ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.338399 4678 audit.go:125] 2017-01-25T05:16:27.33837103-05:00 AUDIT: id="b76cf6e1-869b-4259-9193-03e5273f8ad3" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints" I0125 05:16:27.339021 4678 audit.go:45] 2017-01-25T05:16:27.339012262-05:00 AUDIT: id="b76cf6e1-869b-4259-9193-03e5273f8ad3" response="200" I0125 05:16:27.339067 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints: (2.00921ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.341007 4678 audit.go:125] 2017-01-25T05:16:27.340983643-05:00 AUDIT: id="ac793741-4295-49cc-8aff-79a8784ff92b" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:16:27.444689 4678 audit.go:45] 2017-01-25T05:16:27.444672663-05:00 AUDIT: id="ac793741-4295-49cc-8aff-79a8784ff92b" response="200" I0125 05:16:27.445772 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (106.147783ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.447699 4678 audit.go:125] 2017-01-25T05:16:27.447670691-05:00 AUDIT: id="4e27b3fc-eac3-4d3c-b13e-496c5729351d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:16:27.448571 4678 audit.go:45] 2017-01-25T05:16:27.44856221-05:00 AUDIT: id="4e27b3fc-eac3-4d3c-b13e-496c5729351d" 
response="200" I0125 05:16:27.448627 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (2.483294ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.450674 4678 audit.go:125] 2017-01-25T05:16:27.450651067-05:00 AUDIT: id="8927aefe-0753-4200-9c87-2b83f1722249" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/limitranges" I0125 05:16:27.451574 4678 audit.go:45] 2017-01-25T05:16:27.451559695-05:00 AUDIT: id="8927aefe-0753-4200-9c87-2b83f1722249" response="200" I0125 05:16:27.451634 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/limitranges: (2.417304ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.453169 4678 audit.go:125] 2017-01-25T05:16:27.453144153-05:00 AUDIT: id="501ca8f5-7ba9-42b6-9e96-e6bdab699e76" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/limitranges" I0125 05:16:27.453895 4678 audit.go:45] 2017-01-25T05:16:27.453885872-05:00 AUDIT: id="501ca8f5-7ba9-42b6-9e96-e6bdab699e76" response="200" I0125 05:16:27.453944 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/limitranges: (1.941505ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.455986 4678 audit.go:125] 2017-01-25T05:16:27.455953245-05:00 AUDIT: id="539e5176-2533-4806-a91d-c48b4b51922c" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims" I0125 05:16:27.456864 4678 audit.go:45] 2017-01-25T05:16:27.456854117-05:00 AUDIT: id="539e5176-2533-4806-a91d-c48b4b51922c" response="200" I0125 05:16:27.456916 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims: (2.423376ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.458741 4678 audit.go:125] 2017-01-25T05:16:27.458708833-05:00 AUDIT: id="cc112a2d-f20d-4b1e-800a-02b4aa218fea" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims" I0125 05:16:27.459450 4678 audit.go:45] 2017-01-25T05:16:27.459440572-05:00 AUDIT: id="cc112a2d-f20d-4b1e-800a-02b4aa218fea" response="200" I0125 05:16:27.459501 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims: (2.21377ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 
system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.461130 4678 audit.go:125] 2017-01-25T05:16:27.461107858-05:00 AUDIT: id="71c97422-1f48-4acb-916d-ede3e74523ed" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts" I0125 05:16:27.464601 4678 tokens_controller.go:265] syncServiceAccount(extended-test-postgresql-replication-1-34bbd-xd4g8/builder), service account deleted, removing tokens I0125 05:16:27.465023 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (2.308µs) I0125 05:16:27.465558 4678 audit.go:125] 2017-01-25T05:16:27.465526041-05:00 AUDIT: id="4718104b-82e9-4de0-9363-582ac1eb6335" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-004fh" I0125 05:16:27.466637 4678 tokens_controller.go:265] syncServiceAccount(extended-test-postgresql-replication-1-34bbd-xd4g8/default), service account deleted, removing tokens I0125 05:16:27.467046 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (1.913µs) I0125 05:16:27.467755 4678 audit.go:125] 2017-01-25T05:16:27.467724049-05:00 AUDIT: id="93eaf471-be7a-4401-b936-d5a57ceca439" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw" I0125 05:16:27.468734 4678 audit.go:45] 2017-01-25T05:16:27.468720297-05:00 AUDIT: id="4718104b-82e9-4de0-9363-582ac1eb6335" response="200" I0125 05:16:27.468787 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-004fh: (3.50459ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.470222 4678 audit.go:45] 2017-01-25T05:16:27.470194331-05:00 AUDIT: id="71c97422-1f48-4acb-916d-ede3e74523ed" response="200" I0125 05:16:27.470339 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts: (10.380474ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.470623 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (3.331µs) I0125 05:16:27.470730 4678 tokens_controller.go:265] syncServiceAccount(extended-test-postgresql-replication-1-34bbd-xd4g8/deployer), service account deleted, removing tokens I0125 05:16:27.472660 4678 audit.go:125] 2017-01-25T05:16:27.472622657-05:00 AUDIT: id="a4f1dbaf-de1e-47f5-bfd1-7c0f142e793a" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-sjqpf" I0125 05:16:27.473091 4678 audit.go:45] 2017-01-25T05:16:27.473076985-05:00 AUDIT: id="93eaf471-be7a-4401-b936-d5a57ceca439" response="200" I0125 
05:16:27.473138 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-0g2nw: (5.641773ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.473399 4678 audit.go:125] 2017-01-25T05:16:27.473367754-05:00 AUDIT: id="d6ccdd5f-f0fd-4f39-bcc8-810c5ad8643f" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-1ctp4" I0125 05:16:27.473932 4678 audit.go:125] 2017-01-25T05:16:27.473899161-05:00 AUDIT: id="91333338-850e-43d5-83c2-f1808b9ebff5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:27.475433 4678 audit.go:45] 2017-01-25T05:16:27.475418727-05:00 AUDIT: id="91333338-850e-43d5-83c2-f1808b9ebff5" response="200" I0125 05:16:27.475986 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.318265ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.476939 4678 audit.go:125] 2017-01-25T05:16:27.476904498-05:00 AUDIT: id="c9311908-a7ee-456e-833a-f0688a3a5423" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts" I0125 05:16:27.477178 4678 audit.go:125] 2017-01-25T05:16:27.477147764-05:00 AUDIT: id="14e75276-b073-46a6-81bf-ddea71ce50e0" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-dzq89" I0125 05:16:27.478101 4678 audit.go:125] 2017-01-25T05:16:27.478068546-05:00 AUDIT: id="f3c6355b-c75d-45d0-9731-6ba8bc5cc2d6" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-dockercfg-l993x" I0125 05:16:27.478975 4678 audit.go:45] 2017-01-25T05:16:27.478965915-05:00 AUDIT: id="c9311908-a7ee-456e-833a-f0688a3a5423" response="200" I0125 05:16:27.479030 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts: (7.157674ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.479827 4678 audit.go:45] 2017-01-25T05:16:27.479814182-05:00 AUDIT: id="a4f1dbaf-de1e-47f5-bfd1-7c0f142e793a" response="200" I0125 05:16:27.479871 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-sjqpf: (7.48519ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.482551 4678 audit.go:45] 2017-01-25T05:16:27.482536629-05:00 AUDIT: id="d6ccdd5f-f0fd-4f39-bcc8-810c5ad8643f" response="200" I0125 05:16:27.482592 
4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-1ctp4: (9.468301ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.483450 4678 audit.go:125] 2017-01-25T05:16:27.483418498-05:00 AUDIT: id="4b13efbe-cc97-4949-a60e-3c1b33f37a5d" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8" I0125 05:16:27.484809 4678 audit.go:125] 2017-01-25T05:16:27.484774573-05:00 AUDIT: id="427b882d-1d69-4578-909b-3eb8612c7855" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/podtemplates" I0125 05:16:27.485802 4678 audit.go:45] 2017-01-25T05:16:27.485788358-05:00 AUDIT: id="14e75276-b073-46a6-81bf-ddea71ce50e0" response="200" I0125 05:16:27.485849 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-dzq89: (8.932894ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.486022 4678 audit.go:45] 2017-01-25T05:16:27.486009971-05:00 AUDIT: id="427b882d-1d69-4578-909b-3eb8612c7855" response="200" I0125 05:16:27.486091 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/podtemplates: (5.16174ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.487801 4678 audit.go:45] 2017-01-25T05:16:27.487787939-05:00 AUDIT: id="f3c6355b-c75d-45d0-9731-6ba8bc5cc2d6" response="200" I0125 05:16:27.487847 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-dockercfg-l993x: (10.0101ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.488994 4678 audit.go:125] 2017-01-25T05:16:27.488959995-05:00 AUDIT: id="f0177f27-a805-47e1-a97c-9573b9438c9a" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/podtemplates" I0125 05:16:27.489898 4678 audit.go:125] 2017-01-25T05:16:27.489866211-05:00 AUDIT: id="8bc5ba8a-aac8-4f81-bc1e-6f5d17724ef6" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder" I0125 05:16:27.489966 4678 audit.go:45] 2017-01-25T05:16:27.489953311-05:00 AUDIT: id="4b13efbe-cc97-4949-a60e-3c1b33f37a5d" response="200" I0125 05:16:27.490006 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-r7jj8: (6.836506ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.490763 4678 audit.go:45] 2017-01-25T05:16:27.490749793-05:00 AUDIT: id="f0177f27-a805-47e1-a97c-9573b9438c9a" response="200" I0125 05:16:27.490828 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/podtemplates: (4.263318ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.491885 4678 audit.go:125] 2017-01-25T05:16:27.491849783-05:00 AUDIT: id="2285adcb-3eb0-4bb7-8df7-de67467948eb" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:27.491889 4678 audit.go:45] 2017-01-25T05:16:27.491878994-05:00 AUDIT: id="8bc5ba8a-aac8-4f81-bc1e-6f5d17724ef6" response="404" I0125 05:16:27.492294 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/builder: (2.658386ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.493545 4678 audit.go:45] 2017-01-25T05:16:27.4935351-05:00 AUDIT: id="2285adcb-3eb0-4bb7-8df7-de67467948eb" response="200" I0125 05:16:27.493669 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.597822ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.494044 4678 audit.go:125] 2017-01-25T05:16:27.494018661-05:00 AUDIT: id="bc8d8e99-86da-4caf-99f6-ffda629cec25" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-004fh" I0125 05:16:27.495117 4678 audit.go:125] 2017-01-25T05:16:27.495084863-05:00 AUDIT: id="f1275de2-fbaa-4c43-a766-40fabd9afe8e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:27.495553 4678 audit.go:125] 2017-01-25T05:16:27.495519813-05:00 AUDIT: id="5a7b4a42-176c-4d9a-a58b-1f6ada658257" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:16:27.495891 4678 audit.go:45] 2017-01-25T05:16:27.495878902-05:00 AUDIT: id="bc8d8e99-86da-4caf-99f6-ffda629cec25" response="404" I0125 05:16:27.495937 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/builder-token-004fh: (2.064764ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.496779 4678 audit.go:45] 2017-01-25T05:16:27.496766428-05:00 AUDIT: id="5a7b4a42-176c-4d9a-a58b-1f6ada658257" response="200" I0125 05:16:27.496846 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (4.090199ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.496779 4678 audit.go:45] 2017-01-25T05:16:27.496769359-05:00 AUDIT: 
id="f1275de2-fbaa-4c43-a766-40fabd9afe8e" response="200" I0125 05:16:27.497088 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.238901ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.497816 4678 audit.go:125] 2017-01-25T05:16:27.497782925-05:00 AUDIT: id="c74b8639-eb2f-4b1c-b552-b1d341fa6cce" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:27.499010 4678 audit.go:45] 2017-01-25T05:16:27.498996413-05:00 AUDIT: id="c74b8639-eb2f-4b1c-b552-b1d341fa6cce" response="200" I0125 05:16:27.499103 4678 audit.go:125] 2017-01-25T05:16:27.499068678-05:00 AUDIT: id="6dac6ee0-78d5-47f3-9869-58c8df54ac3d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:16:27.499191 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (1.637083ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.499946 4678 audit.go:45] 2017-01-25T05:16:27.499933367-05:00 AUDIT: id="6dac6ee0-78d5-47f3-9869-58c8df54ac3d" response="200" I0125 05:16:27.499970 4678 audit.go:125] 2017-01-25T05:16:27.499939178-05:00 AUDIT: id="d0cc75a1-a80f-44a2-a6c7-d80715fc9265" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp" I0125 05:16:27.500010 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (2.605588ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.502810 4678 audit.go:125] 2017-01-25T05:16:27.502786834-05:00 AUDIT: id="1a2ec3cf-ef50-469a-b417-7ea5a331f776" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/resourcequotas" I0125 05:16:27.503138 4678 audit.go:45] 2017-01-25T05:16:27.503129194-05:00 AUDIT: id="d0cc75a1-a80f-44a2-a6c7-d80715fc9265" response="200" I0125 05:16:27.503171 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-dockercfg-4rhpp: (3.475624ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.504311 4678 audit.go:45] 2017-01-25T05:16:27.504296647-05:00 AUDIT: id="1a2ec3cf-ef50-469a-b417-7ea5a331f776" response="200" I0125 05:16:27.504382 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/resourcequotas: (3.680597ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 
172.18.7.222:50846] I0125 05:16:27.504807 4678 audit.go:125] 2017-01-25T05:16:27.504775109-05:00 AUDIT: id="9f3d8dcb-aa20-447c-a4b0-7bd283d74c87" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:27.505461 4678 audit.go:125] 2017-01-25T05:16:27.505424226-05:00 AUDIT: id="1da156a4-725c-4483-a0d8-0eb490b1ba1f" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer" I0125 05:16:27.506611 4678 audit.go:45] 2017-01-25T05:16:27.506598795-05:00 AUDIT: id="1da156a4-725c-4483-a0d8-0eb490b1ba1f" response="404" I0125 05:16:27.506654 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/deployer: (1.468468ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.506748 4678 audit.go:125] 2017-01-25T05:16:27.506711062-05:00 AUDIT: id="a159e6b2-b390-4fb3-ad40-6bbc4aaffe38" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/resourcequotas" I0125 05:16:27.506935 4678 audit.go:45] 2017-01-25T05:16:27.506922535-05:00 AUDIT: id="9f3d8dcb-aa20-447c-a4b0-7bd283d74c87" response="200" I0125 05:16:27.507106 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.560509ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.507662 4678 audit.go:45] 2017-01-25T05:16:27.507648689-05:00 AUDIT: id="a159e6b2-b390-4fb3-ad40-6bbc4aaffe38" response="200" I0125 05:16:27.507721 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/resourcequotas: (2.91859ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.507942 4678 audit.go:125] 2017-01-25T05:16:27.507911402-05:00 AUDIT: id="be8ed40f-0ab1-4ae2-a393-56e9eefd441d" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02" I0125 05:16:27.508649 4678 audit.go:125] 2017-01-25T05:16:27.508618217-05:00 AUDIT: id="23ca0e64-8137-4c3f-9e73-8c9bd19b547a" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-1ctp4" I0125 05:16:27.509970 4678 audit.go:45] 2017-01-25T05:16:27.509957366-05:00 AUDIT: id="23ca0e64-8137-4c3f-9e73-8c9bd19b547a" response="404" I0125 05:16:27.510012 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/deployer-token-1ctp4: (1.638435ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 
05:16:27.510112 4678 audit.go:125] 2017-01-25T05:16:27.510077592-05:00 AUDIT: id="6b90e69e-b09f-4ae1-8c20-4763a80640b6" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:16:27.511780 4678 audit.go:45] 2017-01-25T05:16:27.511767186-05:00 AUDIT: id="be8ed40f-0ab1-4ae2-a393-56e9eefd441d" response="200" I0125 05:16:27.511828 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-dockercfg-03n02: (4.142509ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.513543 4678 audit.go:125] 2017-01-25T05:16:27.513510938-05:00 AUDIT: id="4292afb8-e7c4-4b02-9da1-b9d477732dc4" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:27.513761 4678 audit.go:125] 2017-01-25T05:16:27.513731828-05:00 AUDIT: id="1695227c-e48b-419a-be89-7c357d24ebd5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default" I0125 05:16:27.514548 4678 audit.go:45] 2017-01-25T05:16:27.514535778-05:00 AUDIT: id="1695227c-e48b-419a-be89-7c357d24ebd5" response="404" I0125 05:16:27.514588 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts/default: (1.074471ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.514981 4678 audit.go:45] 2017-01-25T05:16:27.514967736-05:00 AUDIT: id="4292afb8-e7c4-4b02-9da1-b9d477732dc4" response="200" I0125 05:16:27.515024 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (1.743433ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.515351 4678 audit.go:125] 2017-01-25T05:16:27.515319869-05:00 AUDIT: id="f879905f-a8c5-4624-a7f1-e69bdf211991" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-dzq89" I0125 05:16:27.516361 4678 audit.go:45] 2017-01-25T05:16:27.516348549-05:00 AUDIT: id="f879905f-a8c5-4624-a7f1-e69bdf211991" response="404" I0125 05:16:27.516404 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets/default-token-dzq89: (1.322377ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:27.516444 4678 audit.go:45] 2017-01-25T05:16:27.516432611-05:00 AUDIT: id="6b90e69e-b09f-4ae1-8c20-4763a80640b6" response="200" I0125 05:16:27.516716 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (8.3172ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.518393 4678 audit.go:125] 
2017-01-25T05:16:27.51836889-05:00 AUDIT: id="48d3f765-499e-4e20-8b0a-d5bb7bd9ba43" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:16:27.519094 4678 audit.go:45] 2017-01-25T05:16:27.519079634-05:00 AUDIT: id="48d3f765-499e-4e20-8b0a-d5bb7bd9ba43" response="200" I0125 05:16:27.519135 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (2.109272ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.521066 4678 audit.go:125] 2017-01-25T05:16:27.521043377-05:00 AUDIT: id="41db3a79-8c38-4a29-9773-1ccc0d20cd61" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services" I0125 05:16:27.521843 4678 audit.go:45] 2017-01-25T05:16:27.521833349-05:00 AUDIT: id="41db3a79-8c38-4a29-9773-1ccc0d20cd61" response="200" I0125 05:16:27.521895 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services: (2.243199ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.523728 4678 audit.go:125] 2017-01-25T05:16:27.523704693-05:00 AUDIT: id="d20b1322-749a-4cf6-8949-c4e4f776efba" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services" I0125 05:16:27.524443 4678 audit.go:45] 2017-01-25T05:16:27.524433895-05:00 AUDIT: id="d20b1322-749a-4cf6-8949-c4e4f776efba" response="200" I0125 05:16:27.524498 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services: (2.172602ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.526402 4678 audit.go:125] 2017-01-25T05:16:27.526379767-05:00 AUDIT: id="39110db0-2d8f-4592-9174-104dcf819ae2" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:16:27.527170 4678 audit.go:45] 2017-01-25T05:16:27.527158541-05:00 AUDIT: id="39110db0-2d8f-4592-9174-104dcf819ae2" response="200" I0125 05:16:27.527240 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (2.30239ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.529297 4678 audit.go:125] 2017-01-25T05:16:27.529272928-05:00 AUDIT: id="453d4e18-9115-41d7-908b-7775d8d089f7" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:16:27.530102 4678 audit.go:45] 2017-01-25T05:16:27.530093074-05:00 AUDIT: id="453d4e18-9115-41d7-908b-7775d8d089f7" response="200" I0125 05:16:27.530153 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (2.392967ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.531850 4678 audit.go:125] 2017-01-25T05:16:27.53182751-05:00 AUDIT: id="e302a9d7-e4be-4caf-abd2-e0ca0da81893" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:16:27.532605 4678 audit.go:45] 2017-01-25T05:16:27.532596706-05:00 AUDIT: id="e302a9d7-e4be-4caf-abd2-e0ca0da81893" response="200" I0125 05:16:27.532651 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (2.104124ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.532866 4678 namespace_controller_utils.go:365] namespace controller - deleteAllContent - namespace: extended-test-postgresql-replication-1-34bbd-xd4g8, estimate: 0 I0125 05:16:27.534603 4678 audit.go:125] 2017-01-25T05:16:27.534576138-05:00 AUDIT: id="c218e79d-6c5e-4f54-99a2-6a42667b5afa" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/finalize" I0125 05:16:27.535731 4678 audit.go:45] 2017-01-25T05:16:27.535718237-05:00 AUDIT: id="c218e79d-6c5e-4f54-99a2-6a42667b5afa" response="200" I0125 05:16:27.535782 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/finalize: (2.623473ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.536373 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (1.973µs) I0125 05:16:27.537927 4678 audit.go:125] 2017-01-25T05:16:27.537905156-05:00 AUDIT: id="9b430726-2e03-4038-9be0-cba4075e675f" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:16:27.540285 4678 audit.go:45] 2017-01-25T05:16:27.540275202-05:00 AUDIT: id="9b430726-2e03-4038-9be0-cba4075e675f" response="200" I0125 05:16:27.540324 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (3.842661ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:27.540817 4678 namespace_controller.go:206] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (520ns) I0125 05:16:27.684588 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:28.455751 4678 audit.go:125] 2017-01-25T05:16:28.45570597-05:00 AUDIT: 
id="3a0d3ba7-e16c-4696-a738-cf2506dd28d3" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:16:28.456142 4678 audit.go:45] 2017-01-25T05:16:28.456129554-05:00 AUDIT: id="3a0d3ba7-e16c-4696-a738-cf2506dd28d3" response="200" I0125 05:16:28.456457 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (942.589µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:29.213261 4678 audit.go:125] 2017-01-25T05:16:29.213224387-05:00 AUDIT: id="6610c821-244b-465d-b9f0-9facdc15d63e" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:16:29.213666 4678 audit.go:45] 2017-01-25T05:16:29.21365592-05:00 AUDIT: id="6610c821-244b-465d-b9f0-9facdc15d63e" response="200" I0125 05:16:29.213951 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (934.342µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:29.260961 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:16:29.260991 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:29.261594 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:16:29.261609 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:29.262694 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Connection:[close] Content-Type:[text/html] Cache-Control:[no-cache]] 0xc426da2640 -1 [] true false map[] 0xc42acb3e00 } I0125 05:16:29.262740 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:16:29.264421 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Cache-Control:[no-cache] Connection:[close] Content-Type:[text/html]] 0xc426da2720 -1 [] true false map[] 0xc42acb3c20 } I0125 05:16:29.264466 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:16:29.280505 4678 audit.go:125] 2017-01-25T05:16:29.280471204-05:00 AUDIT: id="d039803a-8edf-4402-81c7-0c8492599442" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:16:29.282584 4678 audit.go:45] 2017-01-25T05:16:29.282569791-05:00 AUDIT: id="d039803a-8edf-4402-81c7-0c8492599442" response="200" I0125 05:16:29.282863 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (2.586317ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:29.283660 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:16:29.344896 4678 audit.go:125] 2017-01-25T05:16:29.344860544-05:00 AUDIT: id="e8f989cc-2703-4836-ac55-05bda427dd0f" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:16:29.345693 4678 audit.go:45] 2017-01-25T05:16:29.345682571-05:00 AUDIT: id="e8f989cc-2703-4836-ac55-05bda427dd0f" response="200" I0125 05:16:29.345763 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (2.747973ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 
system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:16:29.346023 4678 controller.go:106] Found 0 cronjobs I0125 05:16:29.347805 4678 audit.go:125] 2017-01-25T05:16:29.347786106-05:00 AUDIT: id="acb9706c-828b-4230-bad3-54c2a673a424" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:16:29.348556 4678 audit.go:45] 2017-01-25T05:16:29.348546377-05:00 AUDIT: id="acb9706c-828b-4230-bad3-54c2a673a424" response="200" I0125 05:16:29.348603 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.353818ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:16:29.348798 4678 controller.go:114] Found 0 jobs I0125 05:16:29.348805 4678 controller.go:117] Found 0 groups I0125 05:16:29.551473 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:16:29.551489 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:16:29.639118 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-helper-1-cpv6d I0125 05:16:29.639141 4678 summary.go:383] Missing default interface "eth0" for pod:extended-test-postgresql-replication-1-34bbd-xd4g8_postgresql-master-2-46j9k I0125 05:16:29.639176 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:16:29.684594 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:31.684597 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:31.978791 4678 audit.go:125] 2017-01-25T05:16:31.978752196-05:00 AUDIT: id="4be8c268-1f4f-4ba7-9bae-67ce8310f2cc" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:16:31.979960 4678 audit.go:45] 2017-01-25T05:16:31.979949307-05:00 AUDIT: id="4be8c268-1f4f-4ba7-9bae-67ce8310f2cc" response="200" I0125 05:16:31.980046 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.518361ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:32.255697 4678 audit.go:125] 2017-01-25T05:16:32.255646666-05:00 AUDIT: id="bfd21f73-c6ad-4049-99df-11848850fcac" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8" I0125 05:16:32.256860 4678 audit.go:45] 2017-01-25T05:16:32.256851046-05:00 AUDIT: id="bfd21f73-c6ad-4049-99df-11848850fcac" response="404" I0125 05:16:32.256930 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8: (1.53347ms) 404 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.257677 4678 audit.go:125] 2017-01-25T05:16:32.25765188-05:00 AUDIT: id="a2d82230-6953-4692-8580-9d56988aa182" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api" I0125 05:16:32.257794 4678 audit.go:45] 2017-01-25T05:16:32.257786451-05:00 AUDIT: id="a2d82230-6953-4692-8580-9d56988aa182" response="200" I0125 05:16:32.257833 4678 panics.go:76] GET /api: (374.942µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.258425 4678 audit.go:125] 
2017-01-25T05:16:32.258403458-05:00 AUDIT: id="20562e27-a3cd-453f-b524-3bda56df6d3e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis" I0125 05:16:32.258554 4678 audit.go:45] 2017-01-25T05:16:32.258547154-05:00 AUDIT: id="20562e27-a3cd-453f-b524-3bda56df6d3e" response="200" I0125 05:16:32.258607 4678 panics.go:76] GET /apis: (386.78µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.259175 4678 audit.go:125] 2017-01-25T05:16:32.25915365-05:00 AUDIT: id="8a3a894b-2ed9-41d9-b038-fc6915b73195" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/apps/v1beta1" I0125 05:16:32.259318 4678 audit.go:45] 2017-01-25T05:16:32.259310073-05:00 AUDIT: id="8a3a894b-2ed9-41d9-b038-fc6915b73195" response="200" I0125 05:16:32.259345 4678 panics.go:76] GET /apis/apps/v1beta1: (349.544µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.259957 4678 audit.go:125] 2017-01-25T05:16:32.25993461-05:00 AUDIT: id="7641bc92-127b-4d5f-9a03-6a5e087f07f1" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/authentication.k8s.io/v1beta1" I0125 05:16:32.260069 4678 audit.go:45] 2017-01-25T05:16:32.260062537-05:00 AUDIT: id="7641bc92-127b-4d5f-9a03-6a5e087f07f1" response="200" I0125 05:16:32.260094 4678 panics.go:76] GET /apis/authentication.k8s.io/v1beta1: (339.771µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.260699 4678 audit.go:125] 2017-01-25T05:16:32.26067542-05:00 AUDIT: id="ccc32bf6-ce46-4b66-a738-c57e6f41c927" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/autoscaling/v1" I0125 05:16:32.260799 4678 audit.go:45] 2017-01-25T05:16:32.260791705-05:00 AUDIT: id="ccc32bf6-ce46-4b66-a738-c57e6f41c927" response="200" I0125 05:16:32.260822 4678 panics.go:76] GET /apis/autoscaling/v1: (315.075µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.261397 4678 audit.go:125] 2017-01-25T05:16:32.261379353-05:00 AUDIT: id="4a9641d1-0776-485c-9414-67c16f3d17e3" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/batch/v1" I0125 05:16:32.261496 4678 audit.go:45] 2017-01-25T05:16:32.261488893-05:00 AUDIT: id="4a9641d1-0776-485c-9414-67c16f3d17e3" response="200" I0125 05:16:32.261516 4678 panics.go:76] GET /apis/batch/v1: (321.485µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.262086 4678 audit.go:125] 2017-01-25T05:16:32.2620678-05:00 AUDIT: id="047acee3-1233-482e-a5c1-5b75cfec4b97" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1" I0125 05:16:32.262221 4678 audit.go:45] 2017-01-25T05:16:32.262195034-05:00 AUDIT: id="047acee3-1233-482e-a5c1-5b75cfec4b97" response="200" I0125 05:16:32.262249 4678 panics.go:76] GET /apis/batch/v2alpha1: (349.495µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.262806 4678 audit.go:125] 2017-01-25T05:16:32.262787619-05:00 AUDIT: id="eec64da9-0b58-4a31-a0b6-2d77818184c7" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/certificates.k8s.io/v1alpha1" I0125 05:16:32.262899 4678 audit.go:45] 2017-01-25T05:16:32.262892317-05:00 AUDIT: 
id="eec64da9-0b58-4a31-a0b6-2d77818184c7" response="200" I0125 05:16:32.262933 4678 panics.go:76] GET /apis/certificates.k8s.io/v1alpha1: (314.274µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.263492 4678 audit.go:125] 2017-01-25T05:16:32.26346951-05:00 AUDIT: id="d0d99911-c847-4152-a4e0-5a1c18f173ed" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1" I0125 05:16:32.263635 4678 audit.go:45] 2017-01-25T05:16:32.263627663-05:00 AUDIT: id="d0d99911-c847-4152-a4e0-5a1c18f173ed" response="200" I0125 05:16:32.263698 4678 panics.go:76] GET /apis/extensions/v1beta1: (406.974µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.264350 4678 audit.go:125] 2017-01-25T05:16:32.264327523-05:00 AUDIT: id="5bcbb669-6b7c-4bd0-bfe9-6dff2bde72c5" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/policy/v1beta1" I0125 05:16:32.264470 4678 audit.go:45] 2017-01-25T05:16:32.264460679-05:00 AUDIT: id="5bcbb669-6b7c-4bd0-bfe9-6dff2bde72c5" response="200" I0125 05:16:32.264502 4678 panics.go:76] GET /apis/policy/v1beta1: (361.314µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.265106 4678 audit.go:125] 2017-01-25T05:16:32.265087604-05:00 AUDIT: id="fe34e1f0-3bf9-46cf-a058-2855373543fc" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/storage.k8s.io/v1beta1" I0125 05:16:32.265214 4678 audit.go:45] 2017-01-25T05:16:32.265206988-05:00 AUDIT: id="fe34e1f0-3bf9-46cf-a058-2855373543fc" response="200" I0125 05:16:32.265237 4678 panics.go:76] GET /apis/storage.k8s.io/v1beta1: (327.704µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.265829 4678 audit.go:125] 2017-01-25T05:16:32.26580689-05:00 AUDIT: id="d37d948c-b091-48fc-b7a9-fae7d871a760" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1" I0125 05:16:32.265976 4678 audit.go:45] 2017-01-25T05:16:32.265968645-05:00 AUDIT: id="d37d948c-b091-48fc-b7a9-fae7d871a760" response="200" I0125 05:16:32.266045 4678 panics.go:76] GET /api/v1: (419.843µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.266885 4678 audit.go:125] 2017-01-25T05:16:32.266863192-05:00 AUDIT: id="9a015f9d-ac33-406f-baea-1b6c6235bc16" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/statefulsets" I0125 05:16:32.267864 4678 audit.go:45] 2017-01-25T05:16:32.267853913-05:00 AUDIT: id="9a015f9d-ac33-406f-baea-1b6c6235bc16" response="200" I0125 05:16:32.267931 4678 panics.go:76] GET /apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/statefulsets: (1.25691ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.268728 4678 audit.go:125] 2017-01-25T05:16:32.268699898-05:00 AUDIT: id="0e63e6f9-72f8-4405-93d5-4855cd3e4711" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers" I0125 05:16:32.269491 4678 
audit.go:45] 2017-01-25T05:16:32.269481543-05:00 AUDIT: id="0e63e6f9-72f8-4405-93d5-4855cd3e4711" response="200" I0125 05:16:32.269548 4678 panics.go:76] GET /apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers: (1.029994ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.270272 4678 audit.go:125] 2017-01-25T05:16:32.270240412-05:00 AUDIT: id="8fcfd0ac-ea58-4f28-b6a0-93420d0d30ce" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/batch/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs" I0125 05:16:32.271062 4678 audit.go:45] 2017-01-25T05:16:32.271048843-05:00 AUDIT: id="8fcfd0ac-ea58-4f28-b6a0-93420d0d30ce" response="200" I0125 05:16:32.271138 4678 panics.go:76] GET /apis/batch/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs: (1.080609ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.271837 4678 audit.go:125] 2017-01-25T05:16:32.271807234-05:00 AUDIT: id="c8852905-0609-4b06-8ad2-d154399ac090" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/cronjobs" I0125 05:16:32.272641 4678 audit.go:45] 2017-01-25T05:16:32.272630785-05:00 AUDIT: id="c8852905-0609-4b06-8ad2-d154399ac090" response="200" I0125 05:16:32.272710 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/cronjobs: (1.047529ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.273391 4678 audit.go:125] 2017-01-25T05:16:32.273369568-05:00 AUDIT: id="4ebbc653-38d1-45f5-ad63-e53b892be105" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/scheduledjobs" I0125 05:16:32.274122 4678 audit.go:45] 2017-01-25T05:16:32.274113118-05:00 AUDIT: id="4ebbc653-38d1-45f5-ad63-e53b892be105" response="200" I0125 05:16:32.274179 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/scheduledjobs: (989.829µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.274866 4678 audit.go:125] 2017-01-25T05:16:32.274844014-05:00 AUDIT: id="93b30b43-1e97-4c3a-a33b-1c2e3d20ccd2" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/daemonsets" I0125 05:16:32.275661 4678 audit.go:45] 2017-01-25T05:16:32.275651102-05:00 AUDIT: id="93b30b43-1e97-4c3a-a33b-1c2e3d20ccd2" response="200" I0125 05:16:32.275713 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/daemonsets: (1.019465ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.276381 4678 audit.go:125] 2017-01-25T05:16:32.276359481-05:00 AUDIT: id="7aff6af9-7a87-4f8c-a0dd-9e3e36ee73b9" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" 
namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deployments" I0125 05:16:32.277093 4678 audit.go:45] 2017-01-25T05:16:32.277083909-05:00 AUDIT: id="7aff6af9-7a87-4f8c-a0dd-9e3e36ee73b9" response="200" I0125 05:16:32.277154 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/deployments: (975.434µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.277810 4678 audit.go:125] 2017-01-25T05:16:32.277787926-05:00 AUDIT: id="02b2c4b5-c795-4985-a3e0-15cdd46c471d" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers" I0125 05:16:32.278604 4678 audit.go:45] 2017-01-25T05:16:32.278594777-05:00 AUDIT: id="02b2c4b5-c795-4985-a3e0-15cdd46c471d" response="200" I0125 05:16:32.278655 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/horizontalpodautoscalers: (1.021657ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.279308 4678 audit.go:125] 2017-01-25T05:16:32.279285608-05:00 AUDIT: id="9e368d6c-f947-43ef-884d-c24f9f9a663c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/ingresses" I0125 05:16:32.279951 4678 audit.go:45] 2017-01-25T05:16:32.279941642-05:00 AUDIT: id="9e368d6c-f947-43ef-884d-c24f9f9a663c" response="200" I0125 05:16:32.280004 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/ingresses: (912.52µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.280643 4678 audit.go:125] 2017-01-25T05:16:32.280615727-05:00 AUDIT: id="7d856d13-de10-4fae-b2f0-c4c5f4aff603" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs" I0125 05:16:32.281389 4678 audit.go:45] 2017-01-25T05:16:32.281379913-05:00 AUDIT: id="7d856d13-de10-4fae-b2f0-c4c5f4aff603" response="200" I0125 05:16:32.281450 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/jobs: (1.000743ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.282108 4678 audit.go:125] 2017-01-25T05:16:32.282086506-05:00 AUDIT: id="d3603704-bdf1-47e6-9b90-06474a10cffa" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/networkpolicies" I0125 05:16:32.282905 4678 audit.go:45] 2017-01-25T05:16:32.282895825-05:00 AUDIT: id="d3603704-bdf1-47e6-9b90-06474a10cffa" response="200" I0125 05:16:32.282960 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/networkpolicies: (1.049291ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 
172.18.7.222:50940] I0125 05:16:32.283615 4678 audit.go:125] 2017-01-25T05:16:32.283593704-05:00 AUDIT: id="6824d836-59a1-4930-91d2-dbd1b7330566" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicasets" I0125 05:16:32.284378 4678 audit.go:45] 2017-01-25T05:16:32.284359999-05:00 AUDIT: id="6824d836-59a1-4930-91d2-dbd1b7330566" response="200" I0125 05:16:32.284435 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicasets: (1.009606ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.285084 4678 audit.go:125] 2017-01-25T05:16:32.28506232-05:00 AUDIT: id="105b06f2-598d-425d-bca4-4d33bb5283d2" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:16:32.285219 4678 audit.go:45] 2017-01-25T05:16:32.285193229-05:00 AUDIT: id="105b06f2-598d-425d-bca4-4d33bb5283d2" response="404" I0125 05:16:32.285259 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (362.5µs) 404 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.285988 4678 audit.go:125] 2017-01-25T05:16:32.285961532-05:00 AUDIT: id="26e65ec1-ac12-4698-b031-c70ab8f1ebcd" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/poddisruptionbudgets" I0125 05:16:32.286774 4678 audit.go:45] 2017-01-25T05:16:32.286764036-05:00 AUDIT: id="26e65ec1-ac12-4698-b031-c70ab8f1ebcd" response="200" I0125 05:16:32.286830 4678 panics.go:76] GET /apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/poddisruptionbudgets: (1.037504ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.287628 4678 audit.go:125] 2017-01-25T05:16:32.287605403-05:00 AUDIT: id="b8446f1d-bbf0-46b3-9aec-8416f98e3983" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/configmaps" I0125 05:16:32.288489 4678 audit.go:45] 2017-01-25T05:16:32.288479562-05:00 AUDIT: id="b8446f1d-bbf0-46b3-9aec-8416f98e3983" response="200" I0125 05:16:32.288542 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/configmaps: (1.114609ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.289247 4678 audit.go:125] 2017-01-25T05:16:32.289218132-05:00 AUDIT: id="f38f4a0d-5792-4e0a-8f18-32a9715fb6c2" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints" I0125 05:16:32.290017 4678 audit.go:45] 2017-01-25T05:16:32.29000731-05:00 AUDIT: id="f38f4a0d-5792-4e0a-8f18-32a9715fb6c2" response="200" I0125 05:16:32.290068 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/endpoints: (1.074327ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.290739 4678 audit.go:125] 2017-01-25T05:16:32.290716468-05:00 AUDIT: id="e6b114e8-3941-458e-b34d-3f9b05511ea9" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events" I0125 05:16:32.291592 4678 audit.go:45] 2017-01-25T05:16:32.291582524-05:00 AUDIT: id="e6b114e8-3941-458e-b34d-3f9b05511ea9" response="200" I0125 05:16:32.291642 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/events: (1.110418ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.292319 4678 audit.go:125] 2017-01-25T05:16:32.292296786-05:00 AUDIT: id="74d62442-7407-4738-8b98-ccced96cf485" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/limitranges" I0125 05:16:32.293100 4678 audit.go:45] 2017-01-25T05:16:32.293090424-05:00 AUDIT: id="74d62442-7407-4738-8b98-ccced96cf485" response="200" I0125 05:16:32.293150 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/limitranges: (1.023167ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.293840 4678 audit.go:125] 2017-01-25T05:16:32.293818468-05:00 AUDIT: id="3c318484-d3d5-417a-b07e-95f825691650" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims" I0125 05:16:32.294630 4678 audit.go:45] 2017-01-25T05:16:32.294616646-05:00 AUDIT: id="3c318484-d3d5-417a-b07e-95f825691650" response="200" I0125 05:16:32.294683 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/persistentvolumeclaims: (1.046708ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.295362 4678 audit.go:125] 2017-01-25T05:16:32.295338271-05:00 AUDIT: id="35f65c0a-4f0b-4779-92c3-4b4d7a09f7a0" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods" I0125 05:16:32.296119 4678 audit.go:45] 2017-01-25T05:16:32.29610959-05:00 AUDIT: id="35f65c0a-4f0b-4779-92c3-4b4d7a09f7a0" response="200" I0125 05:16:32.296173 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/pods: (1.017115ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.296841 4678 audit.go:125] 2017-01-25T05:16:32.296818316-05:00 AUDIT: id="51a48574-7d1b-40b2-9cf2-38c7bea51da0" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/podtemplates" I0125 05:16:32.297584 4678 audit.go:45] 2017-01-25T05:16:32.2975735-05:00 AUDIT: id="51a48574-7d1b-40b2-9cf2-38c7bea51da0" response="200" I0125 05:16:32.297647 
4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/podtemplates: (984.178µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.298323 4678 audit.go:125] 2017-01-25T05:16:32.298301152-05:00 AUDIT: id="82308b21-fc56-4dee-bffa-c0dd972e8316" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers" I0125 05:16:32.299137 4678 audit.go:45] 2017-01-25T05:16:32.299127474-05:00 AUDIT: id="82308b21-fc56-4dee-bffa-c0dd972e8316" response="200" I0125 05:16:32.299188 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/replicationcontrollers: (1.044608ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.299867 4678 audit.go:125] 2017-01-25T05:16:32.299838055-05:00 AUDIT: id="2f76e60f-8280-4635-b5d6-4df94b1fb3e8" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/resourcequotas" I0125 05:16:32.300657 4678 audit.go:45] 2017-01-25T05:16:32.300647456-05:00 AUDIT: id="2f76e60f-8280-4635-b5d6-4df94b1fb3e8" response="200" I0125 05:16:32.300736 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/resourcequotas: (1.062123ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.301410 4678 audit.go:125] 2017-01-25T05:16:32.301387198-05:00 AUDIT: id="39de8582-9aff-41c1-8c58-822e984a9f41" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets" I0125 05:16:32.302181 4678 audit.go:45] 2017-01-25T05:16:32.302171947-05:00 AUDIT: id="39de8582-9aff-41c1-8c58-822e984a9f41" response="200" I0125 05:16:32.302237 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/secrets: (1.02262ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.302871 4678 audit.go:125] 2017-01-25T05:16:32.30284948-05:00 AUDIT: id="b69a4765-e86d-4405-bdcf-56d11ab8625b" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts" I0125 05:16:32.303678 4678 audit.go:45] 2017-01-25T05:16:32.30366838-05:00 AUDIT: id="b69a4765-e86d-4405-bdcf-56d11ab8625b" response="200" I0125 05:16:32.303735 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/serviceaccounts: (1.059422ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.304419 4678 audit.go:125] 2017-01-25T05:16:32.304397531-05:00 AUDIT: id="ec738fca-3572-41d8-bde6-65dbd82b8f4f" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-1-34bbd-xd4g8" uri="/api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services" I0125 05:16:32.305260 4678 audit.go:45] 2017-01-25T05:16:32.305251025-05:00 AUDIT: 
id="ec738fca-3572-41d8-bde6-65dbd82b8f4f" response="200" I0125 05:16:32.305312 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-1-34bbd-xd4g8/services: (1.071709ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.305966 4678 audit.go:125] 2017-01-25T05:16:32.305943167-05:00 AUDIT: id="0093fa03-ed55-44a9-8d0c-d746d3382fe9" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/events" I0125 05:16:32.306686 4678 audit.go:45] 2017-01-25T05:16:32.306676366-05:00 AUDIT: id="0093fa03-ed55-44a9-8d0c-d746d3382fe9" response="200" I0125 05:16:32.306730 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/events: (959.545µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.307355 4678 audit.go:125] 2017-01-25T05:16:32.307319978-05:00 AUDIT: id="359ebf0d-60e6-442f-8b5c-c3522e80c616" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:32.308302 4678 audit.go:45] 2017-01-25T05:16:32.308291908-05:00 AUDIT: id="359ebf0d-60e6-442f-8b5c-c3522e80c616" response="200" I0125 05:16:32.308537 4678 panics.go:76] GET /api/v1/nodes: (1.385452ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.309072 4678 audit.go:125] 2017-01-25T05:16:32.309054288-05:00 AUDIT: id="75d2f463-1d38-4308-bcba-03c31718d505" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/pods" I0125 05:16:32.310035 4678 audit.go:45] 2017-01-25T05:16:32.31002532-05:00 AUDIT: id="75d2f463-1d38-4308-bcba-03c31718d505" response="200" I0125 05:16:32.310298 4678 panics.go:76] GET /api/v1/pods: (1.406677ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.310990 4678 audit.go:125] 2017-01-25T05:16:32.31096924-05:00 AUDIT: id="def8000c-04ff-4b15-bce8-d1dc3cc180ce" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:32.311897 4678 audit.go:45] 2017-01-25T05:16:32.311887396-05:00 AUDIT: id="def8000c-04ff-4b15-bce8-d1dc3cc180ce" response="200" I0125 05:16:32.312109 4678 panics.go:76] GET /api/v1/nodes: (1.308024ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.312680 4678 audit.go:125] 2017-01-25T05:16:32.312661814-05:00 AUDIT: id="551f912c-f9e0-4f44-8986-41eec5988d45" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222" I0125 05:16:32.313525 4678 audit.go:45] 2017-01-25T05:16:32.313515595-05:00 AUDIT: id="551f912c-f9e0-4f44-8986-41eec5988d45" response="200" I0125 05:16:32.313732 4678 panics.go:76] GET /api/v1/nodes/172.18.7.222: (1.216558ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.314365 4678 audit.go:125] 2017-01-25T05:16:32.314342173-05:00 AUDIT: id="08e39e35-45ac-4ce4-89fe-4d712ccd5041" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="kube-system" uri="/api/v1/namespaces/kube-system/events?fieldSelector=involvedObject.kind%3DNode%2CinvolvedObject.name%3D172.18.7.222%2CinvolvedObject.namespace%3D%2Csource%3Dkubelet" I0125 05:16:32.315031 4678 
audit.go:45] 2017-01-25T05:16:32.31502146-05:00 AUDIT: id="08e39e35-45ac-4ce4-89fe-4d712ccd5041" response="200" I0125 05:16:32.315070 4678 panics.go:76] GET /api/v1/namespaces/kube-system/events?fieldSelector=involvedObject.kind%3DNode%2CinvolvedObject.name%3D172.18.7.222%2CinvolvedObject.namespace%3D%2Csource%3Dkubelet: (900.177µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.315672 4678 audit.go:125] 2017-01-25T05:16:32.315653493-05:00 AUDIT: id="6db52e6d-cf63-4157-b07b-f88529c0bb2f" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/version" I0125 05:16:32.315771 4678 audit.go:45] 2017-01-25T05:16:32.315763922-05:00 AUDIT: id="6db52e6d-cf63-4157-b07b-f88529c0bb2f" response="200" I0125 05:16:32.315786 4678 panics.go:76] GET /version: (308.199µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.316422 4678 audit.go:125] 2017-01-25T05:16:32.316403399-05:00 AUDIT: id="458564ed-e109-467b-849e-a31e471d74bf" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222:10250/proxy/pods" I0125 05:16:32.317623 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"get", APIVersion:"v1", APIGroup:"", Resource:"nodes/proxy", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/pods"} I0125 05:16:32.317999 4678 server.go:744] GET /pods: (538.139µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:52858] I0125 05:16:32.318068 4678 audit.go:45] 2017-01-25T05:16:32.31805495-05:00 AUDIT: id="458564ed-e109-467b-849e-a31e471d74bf" response="200" I0125 05:16:32.318291 4678 panics.go:76] GET /api/v1/nodes/172.18.7.222:10250/proxy/pods: (2.032583ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.319385 4678 audit.go:125] 2017-01-25T05:16:32.319366729-05:00 AUDIT: id="731b7193-857e-4c88-8f7e-3ad6690069e7" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:32.320303 4678 audit.go:45] 2017-01-25T05:16:32.320285521-05:00 AUDIT: id="731b7193-857e-4c88-8f7e-3ad6690069e7" response="200" I0125 05:16:32.320513 4678 panics.go:76] GET /api/v1/nodes: (1.319218ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.321107 4678 audit.go:125] 2017-01-25T05:16:32.321088861-05:00 AUDIT: id="9bb11256-0120-481c-bfce-fffa42ee3c81" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222" I0125 05:16:32.321849 4678 audit.go:45] 2017-01-25T05:16:32.321839853-05:00 AUDIT: id="9bb11256-0120-481c-bfce-fffa42ee3c81" response="200" I0125 05:16:32.322060 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222: (1.133853ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.322639 4678 audit.go:125] 2017-01-25T05:16:32.32261926-05:00 AUDIT: id="53dc56d9-84dd-4840-a2ba-6a6d9bdb51ea" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/proxy/nodes/172.18.7.222:10250/metrics" 
I0125 05:16:32.323471 4678 proxy.go:187] [7ba13d0c158bb43] Beginning proxy /api/v1/proxy/nodes/172.18.7.222:10250/metrics... I0125 05:16:32.323719 4678 node_auth.go:143] Node request attributes: namespace=, user=&user.DefaultInfo{Name:"system:openshift-node-admin", UID:"", Groups:[]string{"system:node-admins", "system:authenticated"}, Extra:map[string][]string(nil)}, attrs=authorizer.DefaultAuthorizationAttributes{Verb:"get", APIVersion:"v1", APIGroup:"", Resource:"nodes/metrics", ResourceName:"172.18.7.222", RequestAttributes:interface {}(nil), NonResourceURL:false, URL:"/metrics"} I0125 05:16:32.416075 4678 server.go:744] GET /metrics: (92.460434ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:52858] I0125 05:16:32.416075 4678 audit.go:45] 2017-01-25T05:16:32.416055074-05:00 AUDIT: id="53dc56d9-84dd-4840-a2ba-6a6d9bdb51ea" response="200" I0125 05:16:32.416577 4678 proxy.go:189] [7ba13d0c158bb43] Proxy /api/v1/proxy/nodes/172.18.7.222:10250/metrics finished 93.104378ms. I0125 05:16:32.416635 4678 panics.go:76] GET /api/v1/proxy/nodes/172.18.7.222:10250/metrics: (94.17632ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.466893 4678 audit.go:125] 2017-01-25T05:16:32.46685613-05:00 AUDIT: id="c835a953-b323-43e8-b8ee-f7ac94e1dfcb" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="kube-system" uri="/api/v1/namespaces/kube-system/pods?labelSelector=name%3De2e-image-puller" I0125 05:16:32.468115 4678 audit.go:45] 2017-01-25T05:16:32.468102115-05:00 AUDIT: id="c835a953-b323-43e8-b8ee-f7ac94e1dfcb" response="200" I0125 05:16:32.468179 4678 panics.go:76] GET /api/v1/namespaces/kube-system/pods?labelSelector=name%3De2e-image-puller: (1.54976ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.468866 4678 audit.go:125] 2017-01-25T05:16:32.468840148-05:00 AUDIT: id="768ef398-c93d-4052-8e06-37882dd67ed7" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1/nodes" I0125 05:16:32.469844 4678 audit.go:45] 2017-01-25T05:16:32.469834382-05:00 AUDIT: id="768ef398-c93d-4052-8e06-37882dd67ed7" response="200" I0125 05:16:32.470078 4678 panics.go:76] GET /api/v1/nodes: (1.39405ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.470630 4678 audit.go:125] 2017-01-25T05:16:32.470608023-05:00 AUDIT: id="d921ded0-65b3-4756-af5a-c0e575000633" ip="172.18.7.222" method="DELETE" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:16:32.472411 4678 audit.go:45] 2017-01-25T05:16:32.47240133-05:00 AUDIT: id="d921ded0-65b3-4756-af5a-c0e575000633" response="200" I0125 05:16:32.472451 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp: (1.995296ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.473291 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (2.245µs) I0125 05:16:32.473986 4678 audit.go:125] 2017-01-25T05:16:32.473954099-05:00 AUDIT: id="812b17dc-8889-46c3-b441-3e75403949ca" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" 
uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/buildconfigs" I0125 05:16:32.473997 4678 audit.go:125] 2017-01-25T05:16:32.473969109-05:00 AUDIT: id="9fbe1beb-8423-4983-a8ed-64355ab48ec6" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:16:32.474815 4678 audit.go:45] 2017-01-25T05:16:32.47480257-05:00 AUDIT: id="9fbe1beb-8423-4983-a8ed-64355ab48ec6" response="200" I0125 05:16:32.474875 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp: (1.158132ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:32.475068 4678 audit.go:45] 2017-01-25T05:16:32.475055495-05:00 AUDIT: id="812b17dc-8889-46c3-b441-3e75403949ca" response="200" I0125 05:16:32.475125 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/buildconfigs: (1.404326ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.475783 4678 audit.go:125] 2017-01-25T05:16:32.475761986-05:00 AUDIT: id="e84624ab-2108-481e-977d-f558079f6fec" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/builds" I0125 05:16:32.545505 4678 audit.go:45] 2017-01-25T05:16:32.545467293-05:00 AUDIT: id="e84624ab-2108-481e-977d-f558079f6fec" response="200" I0125 05:16:32.545624 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/builds: (70.008854ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.546000 4678 namespace_controller.go:197] Namespace has been deleted extended-test-postgresql-replication-1-34bbd-xd4g8 I0125 05:16:32.546023 4678 namespace_controller.go:198] Finished syncing namespace "extended-test-postgresql-replication-1-34bbd-xd4g8" (413ns) I0125 05:16:32.547386 4678 audit.go:125] 2017-01-25T05:16:32.547342344-05:00 AUDIT: id="97947064-ce86-4ae9-8b34-595dfa39b7d0" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/deploymentconfigs" I0125 05:16:32.549031 4678 audit.go:45] 2017-01-25T05:16:32.549015068-05:00 AUDIT: id="97947064-ce86-4ae9-8b34-595dfa39b7d0" response="200" I0125 05:16:32.549128 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/deploymentconfigs: (2.081718ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.560065 4678 audit.go:125] 2017-01-25T05:16:32.560025378-05:00 AUDIT: id="9062ef6b-f726-40ca-8e8f-540768b476d5" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/egressnetworkpolicies" I0125 05:16:32.561579 4678 audit.go:45] 2017-01-25T05:16:32.561555017-05:00 AUDIT: id="9062ef6b-f726-40ca-8e8f-540768b476d5" response="200" I0125 05:16:32.561666 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/egressnetworkpolicies: (1.886592ms) 200 [[openshift/v1.5.0 (linux/amd64) 
openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.562599 4678 audit.go:125] 2017-01-25T05:16:32.562560444-05:00 AUDIT: id="420ebb97-f73c-4c6a-a5fb-2c5431b383e7" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/imagestreams" I0125 05:16:32.564004 4678 audit.go:45] 2017-01-25T05:16:32.563988373-05:00 AUDIT: id="420ebb97-f73c-4c6a-a5fb-2c5431b383e7" response="200" I0125 05:16:32.564079 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/imagestreams: (1.749658ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.565016 4678 audit.go:125] 2017-01-25T05:16:32.564976416-05:00 AUDIT: id="abad680f-b500-42ef-b6a6-e230cc37a432" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/policies" I0125 05:16:32.566491 4678 audit.go:45] 2017-01-25T05:16:32.56647493-05:00 AUDIT: id="abad680f-b500-42ef-b6a6-e230cc37a432" response="200" I0125 05:16:32.566580 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/policies: (1.861862ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.567522 4678 audit.go:125] 2017-01-25T05:16:32.567477305-05:00 AUDIT: id="8ef807ea-fd8b-47b3-957a-62ff7ead232f" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/policybindings" I0125 05:16:32.569144 4678 audit.go:45] 2017-01-25T05:16:32.569126916-05:00 AUDIT: id="8ef807ea-fd8b-47b3-957a-62ff7ead232f" response="200" I0125 05:16:32.569313 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/policybindings: (2.082667ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.570373 4678 audit.go:125] 2017-01-25T05:16:32.570333525-05:00 AUDIT: id="0958214c-38e1-416f-9ecf-709cdba46296" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/policybindings/:default" I0125 05:16:32.574495 4678 audit.go:45] 2017-01-25T05:16:32.574477524-05:00 AUDIT: id="0958214c-38e1-416f-9ecf-709cdba46296" response="200" I0125 05:16:32.574560 4678 panics.go:76] DELETE /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/policybindings/:default: (4.500748ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.575472 4678 audit.go:125] 2017-01-25T05:16:32.575433518-05:00 AUDIT: id="b93d673c-e9dc-48d3-83f5-c595d97f28bf" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/rolebindings" I0125 05:16:32.576799 4678 audit.go:45] 2017-01-25T05:16:32.576783124-05:00 AUDIT: id="b93d673c-e9dc-48d3-83f5-c595d97f28bf" response="200" I0125 05:16:32.576889 4678 panics.go:76] GET 
/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/rolebindings: (1.69775ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.577817 4678 audit.go:125] 2017-01-25T05:16:32.577779761-05:00 AUDIT: id="8c4815cb-38ef-48e8-9550-e694c4cd0bcb" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/roles" I0125 05:16:32.579129 4678 audit.go:45] 2017-01-25T05:16:32.57911325-05:00 AUDIT: id="8c4815cb-38ef-48e8-9550-e694c4cd0bcb" response="200" I0125 05:16:32.579228 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/roles: (1.701818ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.580153 4678 audit.go:125] 2017-01-25T05:16:32.580113482-05:00 AUDIT: id="7d6ba0f9-f22e-4854-b9ea-6ac216eeb998" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/routes" I0125 05:16:32.581962 4678 audit.go:45] 2017-01-25T05:16:32.581946052-05:00 AUDIT: id="7d6ba0f9-f22e-4854-b9ea-6ac216eeb998" response="200" I0125 05:16:32.582040 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/routes: (2.1776ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.582977 4678 audit.go:125] 2017-01-25T05:16:32.582932643-05:00 AUDIT: id="daeb0670-3bd1-4903-a2db-77f10d9fb634" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/templates" I0125 05:16:32.584347 4678 audit.go:45] 2017-01-25T05:16:32.584330728-05:00 AUDIT: id="daeb0670-3bd1-4903-a2db-77f10d9fb634" response="200" I0125 05:16:32.584421 4678 panics.go:76] GET /oapi/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/templates: (1.729474ms) 200 [[openshift/v1.5.0 (linux/amd64) openshift/86a9783] 172.18.7.222:50794] I0125 05:16:32.585574 4678 audit.go:125] 2017-01-25T05:16:32.585528481-05:00 AUDIT: id="41dc6980-f471-4dd6-9976-4e31ea176dbc" ip="172.18.7.222" method="PUT" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/finalize" I0125 05:16:32.588073 4678 audit.go:45] 2017-01-25T05:16:32.588056703-05:00 AUDIT: id="41dc6980-f471-4dd6-9976-4e31ea176dbc" response="200" I0125 05:16:32.588149 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/finalize: (2.860845ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:32.589491 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (3.167µs) I0125 05:16:32.967212 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:16:32.967240 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:32.967993 4678 prober.go:159] HTTP-Probe Host: http://172.17.0.3, Port: 5000, Path: /healthz I0125 05:16:32.968010 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:32.968191 4678 http.go:82] Probe 
succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:16:32 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42b5974c0 0 [] true false map[] 0xc42bb400f0 } I0125 05:16:32.968263 4678 prober.go:113] Liveness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:16:32.969106 4678 http.go:82] Probe succeeded for http://172.17.0.3:5000/healthz, Response: {200 OK 200 HTTP/1.1 1 1 map[Cache-Control:[no-cache] Date:[Wed, 25 Jan 2017 10:16:32 GMT] Content-Length:[0] Content-Type:[text/plain; charset=utf-8]] 0xc42b597600 0 [] true false map[] 0xc42f708000 } I0125 05:16:32.969140 4678 prober.go:113] Readiness probe for "docker-registry-1-xppm3_default(e932e61a-e2d9-11e6-a4b0-0e6a5cbf0094):registry" succeeded I0125 05:16:33.457571 4678 audit.go:125] 2017-01-25T05:16:33.457541618-05:00 AUDIT: id="8cc36056-3155-42d5-ba35-7817c1b90d26" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:16:33.457954 4678 audit.go:45] 2017-01-25T05:16:33.457943569-05:00 AUDIT: id="8cc36056-3155-42d5-ba35-7817c1b90d26" response="200" I0125 05:16:33.458254 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (907.401µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:33.458580 4678 nodecontroller.go:713] Node 172.18.7.222 ReadyCondition updated. Updating timestamp. I0125 05:16:33.573969 4678 reflector.go:273] pkg/controller/endpoint/endpoints_controller.go:160: forcing resync I0125 05:16:33.574164 4678 endpoints_controller.go:334] Finished syncing service "default/kubernetes" endpoints. (2.022µs) I0125 05:16:33.577057 4678 audit.go:125] 2017-01-25T05:16:33.577018883-05:00 AUDIT: id="b5f599af-c535-4ea7-b15d-97d6e00e0f35" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/router" I0125 05:16:33.577057 4678 audit.go:125] 2017-01-25T05:16:33.577027763-05:00 AUDIT: id="72d73dfd-868f-48b8-aefa-67d06f7ab022" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:endpoint-controller" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/endpoints/docker-registry" I0125 05:16:33.578091 4678 audit.go:45] 2017-01-25T05:16:33.57807698-05:00 AUDIT: id="b5f599af-c535-4ea7-b15d-97d6e00e0f35" response="200" I0125 05:16:33.578097 4678 audit.go:45] 2017-01-25T05:16:33.57808839-05:00 AUDIT: id="72d73dfd-868f-48b8-aefa-67d06f7ab022" response="200" I0125 05:16:33.578166 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/docker-registry: (3.442149ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:33.578285 4678 panics.go:76] GET /api/v1/namespaces/default/endpoints/router: (3.565498ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:endpoint-controller] 172.18.7.222:50846] I0125 05:16:33.578517 4678 endpoints_controller.go:334] Finished syncing service "default/docker-registry" endpoints. (4.462753ms) I0125 05:16:33.578537 4678 endpoints_controller.go:334] Finished syncing service "default/router" endpoints. 
(4.347506ms) I0125 05:16:33.681087 4678 reflector.go:273] pkg/controller/podautoscaler/horizontal.go:133: forcing resync I0125 05:16:33.684588 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:33.762714 4678 reflector.go:273] pkg/controller/disruption/disruption.go:284: forcing resync I0125 05:16:33.762893 4678 reflector.go:273] pkg/controller/disruption/disruption.go:281: forcing resync I0125 05:16:33.762905 4678 reflector.go:273] pkg/controller/disruption/disruption.go:283: forcing resync I0125 05:16:33.785235 4678 reflector.go:273] github.com/openshift/origin/pkg/project/controller/factory.go:36: forcing resync I0125 05:16:33.817252 4678 reflector.go:273] pkg/controller/disruption/disruption.go:285: forcing resync I0125 05:16:33.858846 4678 reflector.go:273] pkg/controller/petset/pet_set.go:148: forcing resync I0125 05:16:33.990119 4678 reflector.go:273] pkg/controller/informers/factory.go:89: forcing resync I0125 05:16:34.010385 4678 gc_controller.go:175] GC'ing orphaned I0125 05:16:34.010404 4678 gc_controller.go:195] GC'ing unscheduled pods which are terminating. I0125 05:16:34.137940 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:455: forcing resync I0125 05:16:34.138129 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:159: forcing resync I0125 05:16:34.138330 4678 reflector.go:273] pkg/controller/volume/persistentvolume/pv_controller_base.go:454: forcing resync I0125 05:16:35.392017 4678 audit.go:125] 2017-01-25T05:16:35.391982302-05:00 AUDIT: id="8916706a-d77c-4c24-9efa-0f43c162c537" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="default" uri="/api/v1/namespaces/default/services/kubernetes" I0125 05:16:35.392934 4678 audit.go:45] 2017-01-25T05:16:35.3929223-05:00 AUDIT: id="8916706a-d77c-4c24-9efa-0f43c162c537" response="200" I0125 05:16:35.393004 4678 panics.go:76] GET /api/v1/namespaces/default/services/kubernetes: (1.215224ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:35.684588 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:36.550561 4678 helpers.go:101] Unable to get network stats from pid 12728: couldn't read network stats: failure opening /proc/12728/net/dev: open /proc/12728/net/dev: no such file or directory I0125 05:16:36.796977 4678 worker.go:45] 0 Health Check Listeners I0125 05:16:36.797006 4678 worker.go:46] 2 Services registered for health checking I0125 05:16:36.797012 4678 worker.go:50] Service default/docker-registry has 1 local endpoints I0125 05:16:36.797017 4678 worker.go:50] Service default/router has 1 local endpoints I0125 05:16:36.801357 4678 helpers.go:101] Unable to get network stats from pid 10842: couldn't read network stats: failure opening /proc/10842/net/dev: open /proc/10842/net/dev: no such file or directory I0125 05:16:37.072644 4678 proxier.go:804] Syncing iptables rules I0125 05:16:37.072669 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t filter] I0125 05:16:37.088756 4678 iptables.go:362] running iptables -N [KUBE-SERVICES -t nat] I0125 05:16:37.098700 4678 iptables.go:362] running iptables -C [OUTPUT -t filter -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:37.108095 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment kubernetes service portals -j KUBE-SERVICES] I0125 05:16:37.117510 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment kubernetes service portals 
-j KUBE-SERVICES] I0125 05:16:37.126718 4678 iptables.go:362] running iptables -N [KUBE-POSTROUTING -t nat] I0125 05:16:37.135897 4678 iptables.go:362] running iptables -C [POSTROUTING -t nat -m comment --comment kubernetes postrouting rules -j KUBE-POSTROUTING] I0125 05:16:37.145564 4678 iptables.go:298] running iptables-save [-t filter] I0125 05:16:37.155197 4678 iptables.go:298] running iptables-save [-t nat] I0125 05:16:37.166015 4678 proxier.go:1310] Restoring iptables rules: *filter :KUBE-SERVICES - [0:0] COMMIT *nat :KUBE-SERVICES - [0:0] :KUBE-NODEPORTS - [0:0] :KUBE-POSTROUTING - [0:0] :KUBE-MARK-MASQ - [0:0] :KUBE-SVC-3VQ6B3MLH7E2SZT4 - [0:0] :KUBE-SEP-ERAQI2Q3J62S7TUE - [0:0] :KUBE-SVC-BA6I5HTZKAAAJT56 - [0:0] :KUBE-SEP-4UGAMUWHIGKOXDDE - [0:0] :KUBE-SVC-GQKZAHCS5DTMHUQ6 - [0:0] :KUBE-SEP-ZBHUBIEXZKFKSF2Z - [0:0] :KUBE-SVC-IKV43KYNCXS2W7KZ - [0:0] :KUBE-SEP-ADXEJ56FADZVKKUX - [0:0] :KUBE-SVC-4JCRTMMYZAAYMIJ2 - [0:0] :KUBE-SEP-BSEFMBJ7ICVP2ZR3 - [0:0] :KUBE-SVC-ECTPRXTXBM34L34Q - [0:0] :KUBE-SEP-HFQWO7NQP2GQFJCX - [0:0] :KUBE-SVC-NPX46M4PTMTKRN6Y - [0:0] :KUBE-SEP-RTHE7RQVZQLKCHSP - [0:0] -A KUBE-POSTROUTING -m comment --comment "kubernetes service traffic requiring SNAT" -m mark --mark 0x00004000/0x00004000 -j MASQUERADE -A KUBE-MARK-MASQ -j MARK --set-xmark 0x00004000/0x00004000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns cluster IP" -m udp -p udp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-3VQ6B3MLH7E2SZT4 -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --rcheck --seconds 10800 --reap -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SVC-3VQ6B3MLH7E2SZT4 -m comment --comment default/kubernetes:dns -j KUBE-SEP-ERAQI2Q3J62S7TUE -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ERAQI2Q3J62S7TUE -m comment --comment default/kubernetes:dns -m recent --name KUBE-SEP-ERAQI2Q3J62S7TUE --set -m udp -p udp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/kubernetes:dns-tcp cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 53 -j KUBE-SVC-BA6I5HTZKAAAJT56 -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --rcheck --seconds 10800 --reap -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SVC-BA6I5HTZKAAAJT56 -m comment --comment default/kubernetes:dns-tcp -j KUBE-SEP-4UGAMUWHIGKOXDDE -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-4UGAMUWHIGKOXDDE -m comment --comment default/kubernetes:dns-tcp -m recent --name KUBE-SEP-4UGAMUWHIGKOXDDE --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8053 -A KUBE-SERVICES -m comment --comment "default/router:80-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 80 -j KUBE-SVC-GQKZAHCS5DTMHUQ6 -A KUBE-SVC-GQKZAHCS5DTMHUQ6 -m comment --comment default/router:80-tcp -j KUBE-SEP-ZBHUBIEXZKFKSF2Z -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ZBHUBIEXZKFKSF2Z -m comment --comment default/router:80-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:80 -A KUBE-SERVICES -m comment --comment "default/router:443-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 443 -j KUBE-SVC-IKV43KYNCXS2W7KZ -A KUBE-SVC-IKV43KYNCXS2W7KZ -m comment --comment default/router:443-tcp -j KUBE-SEP-ADXEJ56FADZVKKUX -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment 
default/router:443-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-ADXEJ56FADZVKKUX -m comment --comment default/router:443-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:443 -A KUBE-SERVICES -m comment --comment "default/router:1936-tcp cluster IP" -m tcp -p tcp -d 172.30.167.255/32 --dport 1936 -j KUBE-SVC-4JCRTMMYZAAYMIJ2 -A KUBE-SVC-4JCRTMMYZAAYMIJ2 -m comment --comment default/router:1936-tcp -j KUBE-SEP-BSEFMBJ7ICVP2ZR3 -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-BSEFMBJ7ICVP2ZR3 -m comment --comment default/router:1936-tcp -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:1936 -A KUBE-SERVICES -m comment --comment "default/docker-registry:5000-tcp cluster IP" -m tcp -p tcp -d 172.30.17.116/32 --dport 5000 -j KUBE-SVC-ECTPRXTXBM34L34Q -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --rcheck --seconds 10800 --reap -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SVC-ECTPRXTXBM34L34Q -m comment --comment default/docker-registry:5000-tcp -j KUBE-SEP-HFQWO7NQP2GQFJCX -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -s 172.17.0.3/32 -j KUBE-MARK-MASQ -A KUBE-SEP-HFQWO7NQP2GQFJCX -m comment --comment default/docker-registry:5000-tcp -m recent --name KUBE-SEP-HFQWO7NQP2GQFJCX --set -m tcp -p tcp -j DNAT --to-destination 172.17.0.3:5000 -A KUBE-SERVICES -m comment --comment "default/kubernetes:https cluster IP" -m tcp -p tcp -d 172.30.0.1/32 --dport 443 -j KUBE-SVC-NPX46M4PTMTKRN6Y -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --rcheck --seconds 10800 --reap -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SVC-NPX46M4PTMTKRN6Y -m comment --comment default/kubernetes:https -j KUBE-SEP-RTHE7RQVZQLKCHSP -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -s 172.18.7.222/32 -j KUBE-MARK-MASQ -A KUBE-SEP-RTHE7RQVZQLKCHSP -m comment --comment default/kubernetes:https -m recent --name KUBE-SEP-RTHE7RQVZQLKCHSP --set -m tcp -p tcp -j DNAT --to-destination 172.18.7.222:8443 -A KUBE-SERVICES -m comment --comment "kubernetes service nodeports; NOTE: this must be the last rule in this chain" -m addrtype --dst-type LOCAL -j KUBE-NODEPORTS COMMIT I0125 05:16:37.166044 4678 iptables.go:339] running iptables-restore [--noflush --counters] I0125 05:16:37.177761 4678 proxier.go:797] syncProxyRules took 105.117104ms I0125 05:16:37.177788 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-CONTAINER -t nat] I0125 05:16:37.187708 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-CONTAINER] I0125 05:16:37.196998 4678 iptables.go:362] running iptables -N [KUBE-PORTALS-HOST -t nat] I0125 05:16:37.206102 4678 iptables.go:362] running iptables -C [OUTPUT -t nat -m comment --comment handle ClusterIPs; NOTE: this must be before the NodePort rules -j KUBE-PORTALS-HOST] I0125 05:16:37.215258 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-CONTAINER -t nat] I0125 05:16:37.224313 4678 iptables.go:362] running iptables -C [PREROUTING -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-CONTAINER] I0125 05:16:37.234009 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-HOST -t nat] I0125 05:16:37.243012 4678 iptables.go:362] running 
iptables -C [OUTPUT -t nat -m addrtype --dst-type LOCAL -m comment --comment handle service NodePorts; NOTE: this must be the last rule in the chain -j KUBE-NODEPORT-HOST] I0125 05:16:37.253345 4678 iptables.go:362] running iptables -N [KUBE-NODEPORT-NON-LOCAL -t filter] I0125 05:16:37.262694 4678 iptables.go:362] running iptables -C [INPUT -t filter -m comment --comment Ensure that non-local NodePort traffic can flow -j KUBE-NODEPORT-NON-LOCAL] I0125 05:16:37.367003 4678 helpers.go:101] Unable to get network stats from pid 12824: couldn't read network stats: failure opening /proc/12824/net/dev: open /proc/12824/net/dev: no such file or directory I0125 05:16:37.476024 4678 audit.go:125] 2017-01-25T05:16:37.475976483-05:00 AUDIT: id="4c44fb5b-41be-460c-a784-2f82bff062d1" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:16:37.476043 4678 audit.go:125] 2017-01-25T05:16:37.476011373-05:00 AUDIT: id="88a207ac-06ae-43ee-a160-492e07946035" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:16:37.477135 4678 audit.go:45] 2017-01-25T05:16:37.477115752-05:00 AUDIT: id="88a207ac-06ae-43ee-a160-492e07946035" response="200" I0125 05:16:37.477166 4678 audit.go:45] 2017-01-25T05:16:37.477155416-05:00 AUDIT: id="4c44fb5b-41be-460c-a784-2f82bff062d1" response="200" I0125 05:16:37.477227 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp: (1.537818ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:37.477227 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp: (3.3582ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.477721 4678 namespace_controller_utils.go:352] namespace controller - deleteAllContent - namespace: extended-test-postgresql-replication-2-7n81h-cp7jp, gvrs: [{apps v1beta1 statefulsets} {autoscaling v1 horizontalpodautoscalers} {batch v1 jobs} {batch v2alpha1 cronjobs} {batch v2alpha1 scheduledjobs} {extensions v1beta1 daemonsets} {extensions v1beta1 deployments} {extensions v1beta1 horizontalpodautoscalers} {extensions v1beta1 ingresses} {extensions v1beta1 jobs} {extensions v1beta1 networkpolicies} {extensions v1beta1 replicasets} {extensions v1beta1 replicationcontrollers} {policy v1beta1 poddisruptionbudgets} { v1 bindings} { v1 configmaps} { v1 endpoints} { v1 events} { v1 limitranges} { v1 persistentvolumeclaims} { v1 serviceaccounts} { v1 podtemplates} { v1 replicationcontrollers} { v1 resourcequotas} { v1 secrets} { v1 services} { v1 pods}] I0125 05:16:37.479656 4678 audit.go:125] 2017-01-25T05:16:37.479632721-05:00 AUDIT: id="1ccb676e-c7f6-4871-b221-825650b3de9f" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/statefulsets" I0125 05:16:37.480588 4678 audit.go:45] 2017-01-25T05:16:37.480577915-05:00 AUDIT: id="1ccb676e-c7f6-4871-b221-825650b3de9f" 
response="200" I0125 05:16:37.480643 4678 panics.go:76] DELETE /apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/statefulsets: (2.511561ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.482372 4678 audit.go:125] 2017-01-25T05:16:37.48234759-05:00 AUDIT: id="ea6e3167-d7af-47f6-8a71-32a05a6ddfef" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/statefulsets" I0125 05:16:37.483101 4678 audit.go:45] 2017-01-25T05:16:37.483091398-05:00 AUDIT: id="ea6e3167-d7af-47f6-8a71-32a05a6ddfef" response="200" I0125 05:16:37.483173 4678 panics.go:76] GET /apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/statefulsets: (2.170361ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.485258 4678 audit.go:125] 2017-01-25T05:16:37.485227189-05:00 AUDIT: id="7a343c02-a412-40bd-a4fe-f0b1f6aa5b43" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers" I0125 05:16:37.486107 4678 audit.go:45] 2017-01-25T05:16:37.486097684-05:00 AUDIT: id="7a343c02-a412-40bd-a4fe-f0b1f6aa5b43" response="200" I0125 05:16:37.486163 4678 panics.go:76] DELETE /apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers: (2.409926ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.488043 4678 audit.go:125] 2017-01-25T05:16:37.48801882-05:00 AUDIT: id="b0c8faaf-387d-4683-8d8f-647fa90c1c64" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers" I0125 05:16:37.488708 4678 audit.go:45] 2017-01-25T05:16:37.48869769-05:00 AUDIT: id="b0c8faaf-387d-4683-8d8f-647fa90c1c64" response="200" I0125 05:16:37.488771 4678 panics.go:76] GET /apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers: (2.200565ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.490793 4678 audit.go:125] 2017-01-25T05:16:37.490769926-05:00 AUDIT: id="6cc629f5-2903-45d8-bf5f-fe322223f912" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/batch/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs" I0125 05:16:37.491725 4678 audit.go:45] 2017-01-25T05:16:37.491715636-05:00 AUDIT: id="6cc629f5-2903-45d8-bf5f-fe322223f912" response="200" I0125 05:16:37.491775 4678 panics.go:76] DELETE 
/apis/batch/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs: (2.450875ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.493611 4678 audit.go:125] 2017-01-25T05:16:37.493588735-05:00 AUDIT: id="0526366d-dba6-4616-bca7-c529828286ae" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/batch/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs" I0125 05:16:37.494309 4678 audit.go:45] 2017-01-25T05:16:37.494299309-05:00 AUDIT: id="0526366d-dba6-4616-bca7-c529828286ae" response="200" I0125 05:16:37.494364 4678 panics.go:76] GET /apis/batch/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs: (2.225658ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.496273 4678 audit.go:125] 2017-01-25T05:16:37.496248537-05:00 AUDIT: id="c659014a-5423-40f4-98e7-c8bf7d7c000e" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/cronjobs" I0125 05:16:37.497174 4678 audit.go:45] 2017-01-25T05:16:37.497164141-05:00 AUDIT: id="c659014a-5423-40f4-98e7-c8bf7d7c000e" response="200" I0125 05:16:37.497239 4678 panics.go:76] DELETE /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/cronjobs: (2.345847ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.499095 4678 audit.go:125] 2017-01-25T05:16:37.49906547-05:00 AUDIT: id="a3e12a71-ba24-45fd-9fcc-cc2e945b0708" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/cronjobs" I0125 05:16:37.499787 4678 audit.go:45] 2017-01-25T05:16:37.49977706-05:00 AUDIT: id="a3e12a71-ba24-45fd-9fcc-cc2e945b0708" response="200" I0125 05:16:37.499848 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/cronjobs: (2.223234ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.501728 4678 audit.go:125] 2017-01-25T05:16:37.501705557-05:00 AUDIT: id="be2f9359-9f36-47de-9489-f1b50b48684f" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/scheduledjobs" I0125 05:16:37.502562 4678 audit.go:45] 2017-01-25T05:16:37.502551852-05:00 AUDIT: id="be2f9359-9f36-47de-9489-f1b50b48684f" response="200" I0125 05:16:37.502611 4678 panics.go:76] DELETE /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/scheduledjobs: (2.228447ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 
system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.504541 4678 audit.go:125] 2017-01-25T05:16:37.504518272-05:00 AUDIT: id="16468f70-ddc5-4df5-a957-9b293b73e21e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/scheduledjobs" I0125 05:16:37.505097 4678 audit.go:45] 2017-01-25T05:16:37.505083404-05:00 AUDIT: id="16468f70-ddc5-4df5-a957-9b293b73e21e" response="200" I0125 05:16:37.505144 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/scheduledjobs: (2.166709ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.506882 4678 audit.go:125] 2017-01-25T05:16:37.506859292-05:00 AUDIT: id="9e8543dc-2b05-4132-ad88-780ac6f53e7e" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/daemonsets" I0125 05:16:37.507763 4678 audit.go:45] 2017-01-25T05:16:37.507753678-05:00 AUDIT: id="9e8543dc-2b05-4132-ad88-780ac6f53e7e" response="200" I0125 05:16:37.507822 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/daemonsets: (2.215278ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.509685 4678 audit.go:125] 2017-01-25T05:16:37.509661808-05:00 AUDIT: id="add38625-edd9-4eee-8af6-668347e1947e" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/daemonsets" I0125 05:16:37.510372 4678 audit.go:45] 2017-01-25T05:16:37.510362432-05:00 AUDIT: id="add38625-edd9-4eee-8af6-668347e1947e" response="200" I0125 05:16:37.510451 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/daemonsets: (2.219637ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.512258 4678 audit.go:125] 2017-01-25T05:16:37.512232165-05:00 AUDIT: id="61a7f85e-2ac7-47d9-8ba7-b997b0f1e988" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/deployments" I0125 05:16:37.513164 4678 audit.go:45] 2017-01-25T05:16:37.513154442-05:00 AUDIT: id="61a7f85e-2ac7-47d9-8ba7-b997b0f1e988" response="200" I0125 05:16:37.513228 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/deployments: (2.293399ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.515108 4678 audit.go:125] 
2017-01-25T05:16:37.515084505-05:00 AUDIT: id="baf4f54c-3648-4165-bdd7-28429886db08" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/deployments" I0125 05:16:37.515804 4678 audit.go:45] 2017-01-25T05:16:37.515794047-05:00 AUDIT: id="baf4f54c-3648-4165-bdd7-28429886db08" response="200" I0125 05:16:37.515855 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/deployments: (2.23702ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.517666 4678 audit.go:125] 2017-01-25T05:16:37.517634488-05:00 AUDIT: id="20f3ac4b-83c5-4bbb-a185-19fcef6669c1" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers" I0125 05:16:37.518523 4678 audit.go:45] 2017-01-25T05:16:37.518512224-05:00 AUDIT: id="20f3ac4b-83c5-4bbb-a185-19fcef6669c1" response="200" I0125 05:16:37.518577 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers: (2.179078ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.520369 4678 audit.go:125] 2017-01-25T05:16:37.520345331-05:00 AUDIT: id="d25d01e8-4535-4f3a-8d27-ad6aa03da7d8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers" I0125 05:16:37.521058 4678 audit.go:45] 2017-01-25T05:16:37.52104787-05:00 AUDIT: id="d25d01e8-4535-4f3a-8d27-ad6aa03da7d8" response="200" I0125 05:16:37.521108 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers: (2.12897ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.523065 4678 audit.go:125] 2017-01-25T05:16:37.523041291-05:00 AUDIT: id="dbe3ca06-6d05-4364-bb7a-ef8cb7623ed0" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/ingresses" I0125 05:16:37.523931 4678 audit.go:45] 2017-01-25T05:16:37.523920802-05:00 AUDIT: id="dbe3ca06-6d05-4364-bb7a-ef8cb7623ed0" response="200" I0125 05:16:37.523983 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/ingresses: (2.31951ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.525880 4678 audit.go:125] 2017-01-25T05:16:37.525856538-05:00 AUDIT: id="eeffd3fd-d733-4659-9eaa-346a92d34c3f" 
ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/ingresses" I0125 05:16:37.526566 4678 audit.go:45] 2017-01-25T05:16:37.526555847-05:00 AUDIT: id="eeffd3fd-d733-4659-9eaa-346a92d34c3f" response="200" I0125 05:16:37.526616 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/ingresses: (2.245473ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.528616 4678 audit.go:125] 2017-01-25T05:16:37.528592313-05:00 AUDIT: id="574e3a61-7b72-4239-bdb8-54c0aa5f6fea" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs" I0125 05:16:37.529424 4678 audit.go:45] 2017-01-25T05:16:37.52941414-05:00 AUDIT: id="574e3a61-7b72-4239-bdb8-54c0aa5f6fea" response="200" I0125 05:16:37.529473 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs: (2.345871ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.531285 4678 audit.go:125] 2017-01-25T05:16:37.531261755-05:00 AUDIT: id="70d66624-bb7a-4ff9-aa8d-9c5d1521c6ff" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs" I0125 05:16:37.532005 4678 audit.go:45] 2017-01-25T05:16:37.531995317-05:00 AUDIT: id="70d66624-bb7a-4ff9-aa8d-9c5d1521c6ff" response="200" I0125 05:16:37.532050 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs: (2.171811ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.533707 4678 audit.go:125] 2017-01-25T05:16:37.533683223-05:00 AUDIT: id="b3ced9fe-4dcb-47a2-8c7d-406122a6e7b6" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/networkpolicies" I0125 05:16:37.534561 4678 audit.go:45] 2017-01-25T05:16:37.534550575-05:00 AUDIT: id="b3ced9fe-4dcb-47a2-8c7d-406122a6e7b6" response="200" I0125 05:16:37.534615 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/networkpolicies: (2.086289ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.536090 4678 audit.go:125] 2017-01-25T05:16:37.536067097-05:00 AUDIT: id="28de3420-4a8f-46f2-80f1-cfe9d459c0f3" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" 
namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/networkpolicies" I0125 05:16:37.536799 4678 audit.go:45] 2017-01-25T05:16:37.536789508-05:00 AUDIT: id="28de3420-4a8f-46f2-80f1-cfe9d459c0f3" response="200" I0125 05:16:37.536847 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/networkpolicies: (1.924791ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.538740 4678 audit.go:125] 2017-01-25T05:16:37.538714269-05:00 AUDIT: id="79b28ff3-4e69-4a2c-b4b0-bcd8b4105401" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicasets" I0125 05:16:37.539631 4678 audit.go:45] 2017-01-25T05:16:37.539620782-05:00 AUDIT: id="79b28ff3-4e69-4a2c-b4b0-bcd8b4105401" response="200" I0125 05:16:37.539689 4678 panics.go:76] DELETE /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicasets: (2.317934ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.541545 4678 audit.go:125] 2017-01-25T05:16:37.541520727-05:00 AUDIT: id="8425f816-7650-4afb-9e0b-d98683572a48" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicasets" I0125 05:16:37.542237 4678 audit.go:45] 2017-01-25T05:16:37.54222456-05:00 AUDIT: id="8425f816-7650-4afb-9e0b-d98683572a48" response="200" I0125 05:16:37.542296 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicasets: (2.214424ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.544091 4678 audit.go:125] 2017-01-25T05:16:37.544067921-05:00 AUDIT: id="11e2a44d-e772-4e73-b544-3474c26f05bc" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/poddisruptionbudgets" I0125 05:16:37.544841 4678 audit.go:45] 2017-01-25T05:16:37.544831346-05:00 AUDIT: id="11e2a44d-e772-4e73-b544-3474c26f05bc" response="200" I0125 05:16:37.544892 4678 panics.go:76] DELETE /apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/poddisruptionbudgets: (2.076133ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.546593 4678 audit.go:125] 2017-01-25T05:16:37.54657027-05:00 AUDIT: id="c192636a-4008-491a-a39c-efc9202cd440" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" 
uri="/apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/poddisruptionbudgets" I0125 05:16:37.547306 4678 audit.go:45] 2017-01-25T05:16:37.547296322-05:00 AUDIT: id="c192636a-4008-491a-a39c-efc9202cd440" response="200" I0125 05:16:37.547355 4678 panics.go:76] GET /apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/poddisruptionbudgets: (2.170473ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.549385 4678 audit.go:125] 2017-01-25T05:16:37.549361669-05:00 AUDIT: id="d81ca1bf-9608-40fa-a9c6-c39d4372bd2f" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/configmaps" I0125 05:16:37.550402 4678 audit.go:45] 2017-01-25T05:16:37.550392284-05:00 AUDIT: id="d81ca1bf-9608-40fa-a9c6-c39d4372bd2f" response="200" I0125 05:16:37.550464 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/configmaps: (2.509742ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.552244 4678 audit.go:125] 2017-01-25T05:16:37.552206986-05:00 AUDIT: id="67b55303-5386-4f1e-8eb7-0efe0c2c6bda" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/configmaps" I0125 05:16:37.552958 4678 audit.go:45] 2017-01-25T05:16:37.552945008-05:00 AUDIT: id="67b55303-5386-4f1e-8eb7-0efe0c2c6bda" response="200" I0125 05:16:37.553012 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/configmaps: (2.162578ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.554833 4678 audit.go:125] 2017-01-25T05:16:37.554808211-05:00 AUDIT: id="64bd1866-8d2c-4321-863c-03bdddf5c920" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/endpoints" I0125 05:16:37.555675 4678 audit.go:45] 2017-01-25T05:16:37.555665091-05:00 AUDIT: id="64bd1866-8d2c-4321-863c-03bdddf5c920" response="200" I0125 05:16:37.555726 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/endpoints: (2.164057ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.557389 4678 audit.go:125] 2017-01-25T05:16:37.557362429-05:00 AUDIT: id="b3f09b42-a625-4382-b236-210853855975" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/endpoints" I0125 05:16:37.558143 4678 audit.go:45] 2017-01-25T05:16:37.558132807-05:00 AUDIT: id="b3f09b42-a625-4382-b236-210853855975" response="200" I0125 
05:16:37.558194 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/endpoints: (2.180383ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.560189 4678 audit.go:125] 2017-01-25T05:16:37.560166881-05:00 AUDIT: id="c0fe9bad-dc75-4029-a214-cbc43d448508" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/events" I0125 05:16:37.561056 4678 audit.go:45] 2017-01-25T05:16:37.561046222-05:00 AUDIT: id="c0fe9bad-dc75-4029-a214-cbc43d448508" response="200" I0125 05:16:37.561109 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/events: (2.373199ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.562941 4678 audit.go:125] 2017-01-25T05:16:37.562918735-05:00 AUDIT: id="a1ceb9ef-f8bc-4e81-bc38-275509cefc14" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/events" I0125 05:16:37.563658 4678 audit.go:45] 2017-01-25T05:16:37.563648516-05:00 AUDIT: id="a1ceb9ef-f8bc-4e81-bc38-275509cefc14" response="200" I0125 05:16:37.563705 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/events: (2.260351ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.565712 4678 audit.go:125] 2017-01-25T05:16:37.565689364-05:00 AUDIT: id="4cfec9d1-d1ab-4edd-9609-c089bc799868" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/limitranges" I0125 05:16:37.566614 4678 audit.go:45] 2017-01-25T05:16:37.566601317-05:00 AUDIT: id="4cfec9d1-d1ab-4edd-9609-c089bc799868" response="200" I0125 05:16:37.566679 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/limitranges: (2.422852ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.568524 4678 audit.go:125] 2017-01-25T05:16:37.568499671-05:00 AUDIT: id="45653821-7cef-40c9-9ec0-4c11c548af2d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/limitranges" I0125 05:16:37.569213 4678 audit.go:45] 2017-01-25T05:16:37.569190865-05:00 AUDIT: id="45653821-7cef-40c9-9ec0-4c11c548af2d" response="200" I0125 05:16:37.569269 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/limitranges: (2.203148ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 
05:16:37.571252 4678 audit.go:125] 2017-01-25T05:16:37.571221757-05:00 AUDIT: id="750d546e-c446-4f23-930a-95c2d7264455" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/persistentvolumeclaims" I0125 05:16:37.572123 4678 audit.go:45] 2017-01-25T05:16:37.572112984-05:00 AUDIT: id="750d546e-c446-4f23-930a-95c2d7264455" response="200" I0125 05:16:37.572180 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/persistentvolumeclaims: (2.369687ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.573942 4678 audit.go:125] 2017-01-25T05:16:37.573919134-05:00 AUDIT: id="fc05651a-938b-4c81-b189-693df3658606" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/persistentvolumeclaims" I0125 05:16:37.574651 4678 audit.go:45] 2017-01-25T05:16:37.574641104-05:00 AUDIT: id="fc05651a-938b-4c81-b189-693df3658606" response="200" I0125 05:16:37.574707 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/persistentvolumeclaims: (2.146054ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.576687 4678 audit.go:125] 2017-01-25T05:16:37.576663625-05:00 AUDIT: id="8ffdba4f-5a2e-4d7d-9f25-f3ed2067e830" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts" I0125 05:16:37.579751 4678 tokens_controller.go:265] syncServiceAccount(extended-test-postgresql-replication-2-7n81h-cp7jp/builder), service account deleted, removing tokens I0125 05:16:37.580224 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (3.162µs) I0125 05:16:37.580730 4678 audit.go:125] 2017-01-25T05:16:37.58069775-05:00 AUDIT: id="6a8a2c57-aadb-47d1-bb18-432c02d0612a" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-dmhm5" I0125 05:16:37.582386 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (1.801µs) I0125 05:16:37.582525 4678 tokens_controller.go:265] syncServiceAccount(extended-test-postgresql-replication-2-7n81h-cp7jp/default), service account deleted, removing tokens I0125 05:16:37.583320 4678 audit.go:125] 2017-01-25T05:16:37.583285072-05:00 AUDIT: id="7f3837b2-3008-4119-8c93-aa19259bf017" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-token-wdz0z" I0125 05:16:37.584126 4678 audit.go:45] 
2017-01-25T05:16:37.584110284-05:00 AUDIT: id="6a8a2c57-aadb-47d1-bb18-432c02d0612a" response="200" I0125 05:16:37.584174 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-dmhm5: (3.722658ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.586506 4678 audit.go:45] 2017-01-25T05:16:37.586492799-05:00 AUDIT: id="8ffdba4f-5a2e-4d7d-9f25-f3ed2067e830" response="200" I0125 05:16:37.586630 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts: (11.367046ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.587490 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (3.312µs) I0125 05:16:37.587553 4678 tokens_controller.go:265] syncServiceAccount(extended-test-postgresql-replication-2-7n81h-cp7jp/deployer), service account deleted, removing tokens I0125 05:16:37.587742 4678 audit.go:125] 2017-01-25T05:16:37.587707611-05:00 AUDIT: id="fe2a73d0-9c08-48e1-a992-505f4903f222" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-m01kt" I0125 05:16:37.588240 4678 audit.go:125] 2017-01-25T05:16:37.588191656-05:00 AUDIT: id="b4d7c19e-fc05-4661-91da-81778323f16c" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-m3wb2" I0125 05:16:37.588446 4678 audit.go:125] 2017-01-25T05:16:37.588413693-05:00 AUDIT: id="22954017-5897-40d7-88e8-98f853a084d8" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:37.591128 4678 audit.go:45] 2017-01-25T05:16:37.591113881-05:00 AUDIT: id="7f3837b2-3008-4119-8c93-aa19259bf017" response="200" I0125 05:16:37.591172 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-token-wdz0z: (8.126145ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.591584 4678 audit.go:45] 2017-01-25T05:16:37.591570855-05:00 AUDIT: id="22954017-5897-40d7-88e8-98f853a084d8" response="200" I0125 05:16:37.591671 4678 audit.go:125] 2017-01-25T05:16:37.591640334-05:00 AUDIT: id="fba72654-23f0-4026-9aad-8c0b93c14904" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts" I0125 05:16:37.592137 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (3.956872ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.593587 4678 audit.go:125] 2017-01-25T05:16:37.593561981-05:00 AUDIT: 
id="d05c0a97-17dc-44d6-ba9b-0686de7ab113" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:37.594022 4678 audit.go:125] 2017-01-25T05:16:37.594000901-05:00 AUDIT: id="100fb40e-0b43-46fb-92ad-5ca9ee6dde92" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-token-xjvj7" I0125 05:16:37.594604 4678 audit.go:45] 2017-01-25T05:16:37.594590771-05:00 AUDIT: id="fba72654-23f0-4026-9aad-8c0b93c14904" response="200" I0125 05:16:37.594672 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts: (5.895251ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.597069 4678 audit.go:45] 2017-01-25T05:16:37.597055466-05:00 AUDIT: id="d05c0a97-17dc-44d6-ba9b-0686de7ab113" response="200" I0125 05:16:37.597282 4678 audit.go:45] 2017-01-25T05:16:37.597269619-05:00 AUDIT: id="fe2a73d0-9c08-48e1-a992-505f4903f222" response="200" I0125 05:16:37.597321 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-m01kt: (9.852884ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.597598 4678 audit.go:45] 2017-01-25T05:16:37.597585063-05:00 AUDIT: id="b4d7c19e-fc05-4661-91da-81778323f16c" response="200" I0125 05:16:37.597635 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-m3wb2: (9.670678ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.597669 4678 audit.go:125] 2017-01-25T05:16:37.597638271-05:00 AUDIT: id="682189fc-616b-4456-82c9-5d41b10d617f" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/podtemplates" I0125 05:16:37.598858 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (5.467688ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.599996 4678 audit.go:45] 2017-01-25T05:16:37.599979272-05:00 AUDIT: id="682189fc-616b-4456-82c9-5d41b10d617f" response="200" I0125 05:16:37.600062 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/podtemplates: (4.501431ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.600514 4678 audit.go:125] 2017-01-25T05:16:37.600481635-05:00 AUDIT: id="bb0513cc-877a-4abe-bbb6-ba204310e50d" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-zr08x" I0125 05:16:37.601345 4678 audit.go:125] 
2017-01-25T05:16:37.601312156-05:00 AUDIT: id="0e328d44-b46f-4912-8acd-8e13a9728b81" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:37.603045 4678 audit.go:45] 2017-01-25T05:16:37.603031203-05:00 AUDIT: id="100fb40e-0b43-46fb-92ad-5ca9ee6dde92" response="200" I0125 05:16:37.603086 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-token-xjvj7: (9.218149ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.603394 4678 audit.go:45] 2017-01-25T05:16:37.603381732-05:00 AUDIT: id="0e328d44-b46f-4912-8acd-8e13a9728b81" response="200" I0125 05:16:37.603610 4678 audit.go:125] 2017-01-25T05:16:37.603577618-05:00 AUDIT: id="474e6c72-f02e-4753-9881-ccff2712c31d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/podtemplates" I0125 05:16:37.604451 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (3.385457ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.605152 4678 audit.go:45] 2017-01-25T05:16:37.60513861-05:00 AUDIT: id="474e6c72-f02e-4753-9881-ccff2712c31d" response="200" I0125 05:16:37.605248 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/podtemplates: (4.102627ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.605809 4678 audit.go:125] 2017-01-25T05:16:37.605786143-05:00 AUDIT: id="7c97ed47-999f-4a76-a1f3-f8fcd14b7947" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-dockercfg-mt0j2" I0125 05:16:37.606834 4678 audit.go:45] 2017-01-25T05:16:37.606820776-05:00 AUDIT: id="bb0513cc-877a-4abe-bbb6-ba204310e50d" response="200" I0125 05:16:37.606875 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-zr08x: (6.620243ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.609725 4678 audit.go:45] 2017-01-25T05:16:37.609709898-05:00 AUDIT: id="7c97ed47-999f-4a76-a1f3-f8fcd14b7947" response="200" I0125 05:16:37.609763 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-dockercfg-mt0j2: (4.118576ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.609801 4678 audit.go:125] 2017-01-25T05:16:37.609769222-05:00 AUDIT: id="a5b6a1c5-9536-4613-8f1c-0f429eb60b6d" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicationcontrollers" I0125 05:16:37.611233 
4678 audit.go:45] 2017-01-25T05:16:37.611216561-05:00 AUDIT: id="a5b6a1c5-9536-4613-8f1c-0f429eb60b6d" response="200" I0125 05:16:37.611301 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicationcontrollers: (3.282921ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.611585 4678 audit.go:125] 2017-01-25T05:16:37.611552767-05:00 AUDIT: id="87b63f68-0b5f-4a3f-bcf6-027f96fa6e4c" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:37.612248 4678 audit.go:125] 2017-01-25T05:16:37.61221663-05:00 AUDIT: id="579faa32-3a56-4803-b9f8-0f8b9682ba63" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder" I0125 05:16:37.613145 4678 audit.go:45] 2017-01-25T05:16:37.613131827-05:00 AUDIT: id="579faa32-3a56-4803-b9f8-0f8b9682ba63" response="404" I0125 05:16:37.613193 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/builder: (1.212379ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.613463 4678 audit.go:45] 2017-01-25T05:16:37.613450939-05:00 AUDIT: id="87b63f68-0b5f-4a3f-bcf6-027f96fa6e4c" response="200" I0125 05:16:37.613649 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.32473ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.613952 4678 audit.go:125] 2017-01-25T05:16:37.613917021-05:00 AUDIT: id="e298d618-a020-43fe-b133-b3908e77509d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicationcontrollers" I0125 05:16:37.614622 4678 audit.go:125] 2017-01-25T05:16:37.614589862-05:00 AUDIT: id="0e1fdcc4-5d6d-4a31-a409-5fd62517a171" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-dockercfg-vxkfr" I0125 05:16:37.614785 4678 audit.go:45] 2017-01-25T05:16:37.614771856-05:00 AUDIT: id="e298d618-a020-43fe-b133-b3908e77509d" response="200" I0125 05:16:37.614855 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicationcontrollers: (3.142835ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.615297 4678 audit.go:125] 2017-01-25T05:16:37.61526528-05:00 AUDIT: id="442ec158-a67e-4d82-be90-317b9d2d24c3" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-m01kt" I0125 05:16:37.616318 4678 audit.go:45] 2017-01-25T05:16:37.616302788-05:00 AUDIT: id="442ec158-a67e-4d82-be90-317b9d2d24c3" response="404" I0125 05:16:37.616361 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/builder-token-m01kt: (1.34021ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.617619 4678 audit.go:125] 2017-01-25T05:16:37.617596516-05:00 AUDIT: id="d1fc3fd5-466e-4986-bb4e-9281c093409c" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/resourcequotas" I0125 05:16:37.618881 4678 audit.go:45] 2017-01-25T05:16:37.618865432-05:00 AUDIT: id="0e1fdcc4-5d6d-4a31-a409-5fd62517a171" response="200" I0125 05:16:37.618912 4678 audit.go:45] 2017-01-25T05:16:37.618900358-05:00 AUDIT: id="d1fc3fd5-466e-4986-bb4e-9281c093409c" response="200" I0125 05:16:37.618923 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-dockercfg-vxkfr: (4.568326ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.618973 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/resourcequotas: (3.122415ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.620499 4678 audit.go:125] 2017-01-25T05:16:37.620467143-05:00 AUDIT: id="0871d777-ff5b-48cc-8ec1-ceb0574af168" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer" I0125 05:16:37.621084 4678 audit.go:125] 2017-01-25T05:16:37.62105021-05:00 AUDIT: id="c0b2b2af-34dd-431e-a35a-e440d2d709d2" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:37.621367 4678 audit.go:125] 2017-01-25T05:16:37.621332858-05:00 AUDIT: id="a3366539-c12e-40a0-8dc6-f2ad79d74742" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/resourcequotas" I0125 05:16:37.621940 4678 audit.go:45] 2017-01-25T05:16:37.621928176-05:00 AUDIT: id="0871d777-ff5b-48cc-8ec1-ceb0574af168" response="404" I0125 05:16:37.621987 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/deployer: (1.751833ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.622731 4678 audit.go:125] 2017-01-25T05:16:37.622700251-05:00 AUDIT: id="76dcefa1-4b04-480a-843d-321f303f02be" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-m3wb2" I0125 05:16:37.622748 4678 audit.go:45] 2017-01-25T05:16:37.622736991-05:00 AUDIT: id="c0b2b2af-34dd-431e-a35a-e440d2d709d2" response="200" I0125 05:16:37.622924 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (2.103708ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.623134 4678 audit.go:45] 2017-01-25T05:16:37.623120503-05:00 AUDIT: id="a3366539-c12e-40a0-8dc6-f2ad79d74742" response="200" I0125 05:16:37.623210 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/resourcequotas: (3.568867ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.623626 4678 audit.go:125] 2017-01-25T05:16:37.623592159-05:00 AUDIT: id="fce245ec-5f04-4764-97b7-c907b9b6cf60" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-dockercfg-9lh9b" I0125 05:16:37.623835 4678 audit.go:45] 2017-01-25T05:16:37.623823351-05:00 AUDIT: id="76dcefa1-4b04-480a-843d-321f303f02be" response="404" I0125 05:16:37.623875 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/deployer-token-m3wb2: (1.409167ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.626232 4678 audit.go:125] 2017-01-25T05:16:37.626183766-05:00 AUDIT: id="20f00398-1bf3-47b0-b53b-f4f07ce4ce47" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:16:37.626419 4678 audit.go:45] 2017-01-25T05:16:37.626405908-05:00 AUDIT: id="fce245ec-5f04-4764-97b7-c907b9b6cf60" response="200" I0125 05:16:37.626459 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-dockercfg-9lh9b: (3.098412ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.627363 4678 audit.go:45] 2017-01-25T05:16:37.627345638-05:00 AUDIT: id="20f00398-1bf3-47b0-b53b-f4f07ce4ce47" response="200" I0125 05:16:37.627419 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (2.915265ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.628177 4678 audit.go:125] 2017-01-25T05:16:37.628146386-05:00 AUDIT: id="828fa955-ca81-4ae9-9997-d6f6b238b27d" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default" I0125 05:16:37.628782 4678 audit.go:125] 2017-01-25T05:16:37.628747503-05:00 AUDIT: id="cbd3e44e-9806-4b1a-9d5c-fe324fedb3da" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" 
uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg" I0125 05:16:37.629670 4678 audit.go:45] 2017-01-25T05:16:37.629657786-05:00 AUDIT: id="828fa955-ca81-4ae9-9997-d6f6b238b27d" response="404" I0125 05:16:37.629712 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts/default: (1.797897ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.629846 4678 audit.go:45] 2017-01-25T05:16:37.629833821-05:00 AUDIT: id="cbd3e44e-9806-4b1a-9d5c-fe324fedb3da" response="200" I0125 05:16:37.629888 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets?fieldSelector=type%3Dkubernetes.io%2Fdockercfg: (1.365873ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.630161 4678 audit.go:125] 2017-01-25T05:16:37.630130985-05:00 AUDIT: id="e17faaf4-27c4-4fce-a797-a469dd939ccb" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:16:37.630668 4678 audit.go:125] 2017-01-25T05:16:37.630623711-05:00 AUDIT: id="dd357616-f6f3-4d65-95b0-40a4bed5ca2f" ip="172.18.7.222" method="DELETE" user="system:openshift-master" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-token-xjvj7" I0125 05:16:37.631014 4678 audit.go:45] 2017-01-25T05:16:37.631001132-05:00 AUDIT: id="e17faaf4-27c4-4fce-a797-a469dd939ccb" response="200" I0125 05:16:37.631067 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (3.128863ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.631521 4678 audit.go:45] 2017-01-25T05:16:37.631508113-05:00 AUDIT: id="dd357616-f6f3-4d65-95b0-40a4bed5ca2f" response="404" I0125 05:16:37.631562 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets/default-token-xjvj7: (1.170859ms) 404 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:37.633072 4678 audit.go:125] 2017-01-25T05:16:37.633045868-05:00 AUDIT: id="862f2903-d3c9-4d74-b424-855716519570" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/services" I0125 05:16:37.633947 4678 audit.go:45] 2017-01-25T05:16:37.633937545-05:00 AUDIT: id="862f2903-d3c9-4d74-b424-855716519570" response="200" I0125 05:16:37.634000 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/services: (2.370996ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.635959 4678 audit.go:125] 2017-01-25T05:16:37.63593544-05:00 AUDIT: id="709ce67a-ba1c-41ff-912d-88dec27bcb98" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" 
namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/services" I0125 05:16:37.636697 4678 audit.go:45] 2017-01-25T05:16:37.636688052-05:00 AUDIT: id="709ce67a-ba1c-41ff-912d-88dec27bcb98" response="200" I0125 05:16:37.636753 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/services: (2.315298ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.638695 4678 audit.go:125] 2017-01-25T05:16:37.638672485-05:00 AUDIT: id="ac07ccf5-dcef-4866-8b44-e8b1f9a98ce8" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/pods" I0125 05:16:37.639448 4678 audit.go:45] 2017-01-25T05:16:37.639438724-05:00 AUDIT: id="ac07ccf5-dcef-4866-8b44-e8b1f9a98ce8" response="200" I0125 05:16:37.639512 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/pods: (2.300595ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.641502 4678 audit.go:125] 2017-01-25T05:16:37.641479822-05:00 AUDIT: id="54c0d8d0-b410-444d-b9be-1371b24c83f0" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/pods" I0125 05:16:37.642343 4678 audit.go:45] 2017-01-25T05:16:37.642333687-05:00 AUDIT: id="54c0d8d0-b410-444d-b9be-1371b24c83f0" response="200" I0125 05:16:37.642400 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/pods: (2.389761ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.644156 4678 audit.go:125] 2017-01-25T05:16:37.644124014-05:00 AUDIT: id="24fe8612-90df-46bf-a845-bc8dad8ac8cb" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/pods" I0125 05:16:37.644882 4678 audit.go:45] 2017-01-25T05:16:37.644872714-05:00 AUDIT: id="24fe8612-90df-46bf-a845-bc8dad8ac8cb" response="200" I0125 05:16:37.644938 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/pods: (2.236046ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.645161 4678 namespace_controller_utils.go:365] namespace controller - deleteAllContent - namespace: extended-test-postgresql-replication-2-7n81h-cp7jp, estimate: 0 I0125 05:16:37.646855 4678 audit.go:125] 2017-01-25T05:16:37.646831716-05:00 AUDIT: id="f3e1e731-60f9-425b-9a3d-3ad70d7e1d41" ip="172.18.7.222" method="PUT" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/finalize" 
I0125 05:16:37.648224 4678 audit.go:45] 2017-01-25T05:16:37.648210835-05:00 AUDIT: id="f3e1e731-60f9-425b-9a3d-3ad70d7e1d41" response="200" I0125 05:16:37.648298 4678 panics.go:76] PUT /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/finalize: (2.819105ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.648641 4678 serviceaccounts_controller.go:191] Finished syncing namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (1.835µs) I0125 05:16:37.650336 4678 audit.go:125] 2017-01-25T05:16:37.650313022-05:00 AUDIT: id="83d9d394-7b88-4306-bbd9-c4032a44c7c1" ip="172.18.7.222" method="DELETE" user="system:serviceaccount:openshift-infra:namespace-controller" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:16:37.652920 4678 audit.go:45] 2017-01-25T05:16:37.652904292-05:00 AUDIT: id="83d9d394-7b88-4306-bbd9-c4032a44c7c1" response="200" I0125 05:16:37.652975 4678 panics.go:76] DELETE /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp: (4.145525ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:namespace-controller] 172.18.7.222:50846] I0125 05:16:37.653260 4678 namespace_controller.go:206] Finished syncing namespace "extended-test-postgresql-replication-2-7n81h-cp7jp" (527ns) I0125 05:16:37.684610 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:38.141494 4678 factory.go:104] Error trying to work out if we can handle /system.slice/dev-mqueue.mount: invalid container name I0125 05:16:38.141515 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/dev-mqueue.mount" I0125 05:16:38.141524 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/dev-mqueue.mount", but ignoring. I0125 05:16:38.141532 4678 manager.go:867] ignoring container "/system.slice/dev-mqueue.mount" I0125 05:16:38.141540 4678 factory.go:104] Error trying to work out if we can handle /system.slice/sys-kernel-debug.mount: invalid container name I0125 05:16:38.141543 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/sys-kernel-debug.mount" I0125 05:16:38.141547 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/sys-kernel-debug.mount", but ignoring. I0125 05:16:38.141553 4678 manager.go:867] ignoring container "/system.slice/sys-kernel-debug.mount" I0125 05:16:38.141559 4678 factory.go:104] Error trying to work out if we can handle /system.slice/-.mount: invalid container name I0125 05:16:38.141562 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/-.mount" I0125 05:16:38.141565 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/-.mount", but ignoring. 
I0125 05:16:38.141571 4678 manager.go:867] ignoring container "/system.slice/-.mount" I0125 05:16:38.141597 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-04c98b55\x2de2da\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-router\x2dtoken\x2ds79l8.mount: invalid container name I0125 05:16:38.141602 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount" I0125 05:16:38.141612 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount", but ignoring. I0125 05:16:38.141623 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-router\\x2dtoken\\x2ds79l8.mount" I0125 05:16:38.141652 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-e932e61a\x2de2d9\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-registry\x2dtoken\x2dvjbst.mount: invalid container name I0125 05:16:38.141663 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount" I0125 05:16:38.141671 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount", but ignoring. I0125 05:16:38.141679 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-e932e61a\\x2de2d9\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-registry\\x2dtoken\\x2dvjbst.mount" I0125 05:16:38.141690 4678 factory.go:104] Error trying to work out if we can handle /system.slice/dev-hugepages.mount: invalid container name I0125 05:16:38.141693 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/dev-hugepages.mount" I0125 05:16:38.141697 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/dev-hugepages.mount", but ignoring. I0125 05:16:38.141702 4678 manager.go:867] ignoring container "/system.slice/dev-hugepages.mount" I0125 05:16:38.141727 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir-pods-04c98b55\x2de2da\x2d11e6\x2da4b0\x2d0e6a5cbf0094-volumes-kubernetes.io\x7esecret-server\x2dcertificate.mount: invalid container name I0125 05:16:38.141730 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount" I0125 05:16:38.141738 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount", but ignoring. 
I0125 05:16:38.141748 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir-pods-04c98b55\\x2de2da\\x2d11e6\\x2da4b0\\x2d0e6a5cbf0094-volumes-kubernetes.io\\x7esecret-server\\x2dcertificate.mount" I0125 05:16:38.141758 4678 factory.go:104] Error trying to work out if we can handle /system.slice/run-user-1000.mount: invalid container name I0125 05:16:38.141761 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/run-user-1000.mount" I0125 05:16:38.141765 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/run-user-1000.mount", but ignoring. I0125 05:16:38.141770 4678 manager.go:867] ignoring container "/system.slice/run-user-1000.mount" I0125 05:16:38.141779 4678 factory.go:104] Error trying to work out if we can handle /system.slice/mnt-openshift\x2dxfs\x2dvol\x2ddir.mount: invalid container name I0125 05:16:38.141782 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount" I0125 05:16:38.141787 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount", but ignoring. I0125 05:16:38.141792 4678 manager.go:867] ignoring container "/system.slice/mnt-openshift\\x2dxfs\\x2dvol\\x2ddir.mount" I0125 05:16:38.141799 4678 factory.go:104] Error trying to work out if we can handle /system.slice/sys-kernel-config.mount: invalid container name I0125 05:16:38.141802 4678 factory.go:115] Factory "docker" was unable to handle container "/system.slice/sys-kernel-config.mount" I0125 05:16:38.141806 4678 factory.go:108] Factory "systemd" can handle container "/system.slice/sys-kernel-config.mount", but ignoring. I0125 05:16:38.141814 4678 manager.go:867] ignoring container "/system.slice/sys-kernel-config.mount" I0125 05:16:38.141826 4678 manager.go:955] Destroyed container: "/system.slice/docker-ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d.scope" (aliases: [k8s_postgresql-master.dfff9f08_postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8_daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094_566678ec ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d], namespace: "docker") I0125 05:16:38.141846 4678 handler.go:325] Added event &{/system.slice/docker-ebd85b26ebbac046bf279bb006590c17962462a904a89354944e5204a72b6e4d.scope 2017-01-25 05:16:38.141839907 -0500 EST containerDeletion {}} I0125 05:16:38.141868 4678 manager.go:955] Destroyed container: "/system.slice/docker-1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208.scope" (aliases: [k8s_postgresql.f954765b_postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8_b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094_422ec933 1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208], namespace: "docker") I0125 05:16:38.141876 4678 handler.go:325] Added event &{/system.slice/docker-1ebc67751226ce59fcf93505cef394202394c198f6d5700a5db990056ccbd208.scope 2017-01-25 05:16:38.141873579 -0500 EST containerDeletion {}} I0125 05:16:38.141886 4678 manager.go:955] Destroyed container: "/system.slice/docker-e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650.scope" (aliases: [k8s_POD.73b4fecf_postgresql-master-2-46j9k_extended-test-postgresql-replication-1-34bbd-xd4g8_daedc0da-e2e6-11e6-a4b0-0e6a5cbf0094_6093cf99 e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650], namespace: "docker") I0125 05:16:38.141893 4678 handler.go:325] Added event 
&{/system.slice/docker-e4a03b23568745673fd232715d3ada08a2943eb5de205419dc61195b2654a650.scope 2017-01-25 05:16:38.141891255 -0500 EST containerDeletion {}} I0125 05:16:38.141901 4678 manager.go:955] Destroyed container: "/system.slice/docker-969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017.scope" (aliases: [k8s_POD.73b4fecf_postgresql-helper-1-cpv6d_extended-test-postgresql-replication-1-34bbd-xd4g8_b76687cc-e2e6-11e6-a4b0-0e6a5cbf0094_777532d7 969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017], namespace: "docker") I0125 05:16:38.141908 4678 handler.go:325] Added event &{/system.slice/docker-969cb704ff80b655d8b0cb16c2ef1906cf37977d55137e7a770bbb1a6af73017.scope 2017-01-25 05:16:38.141906866 -0500 EST containerDeletion {}} I0125 05:16:38.459431 4678 audit.go:125] 2017-01-25T05:16:38.459401298-05:00 AUDIT: id="1697cb1c-9855-4085-b705-21a404c0670e" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/api/v1/nodes?resourceVersion=0" I0125 05:16:38.459809 4678 audit.go:45] 2017-01-25T05:16:38.459796748-05:00 AUDIT: id="1697cb1c-9855-4085-b705-21a404c0670e" response="200" I0125 05:16:38.460114 4678 panics.go:76] GET /api/v1/nodes?resourceVersion=0: (923.113µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:39.260985 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:16:39.261013 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:39.261683 4678 prober.go:159] HTTP-Probe Host: http://localhost, Port: 1936, Path: /healthz I0125 05:16:39.261699 4678 prober.go:162] HTTP-Probe Headers: map[] I0125 05:16:39.261986 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc42cf46de0 -1 [] true false map[] 0xc4339b3b30 } I0125 05:16:39.262035 4678 prober.go:113] Liveness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:16:39.262258 4678 http.go:82] Probe succeeded for http://localhost:1936/healthz, Response: {200 OK 200 HTTP/1.0 1 0 map[Content-Type:[text/html] Cache-Control:[no-cache] Connection:[close]] 0xc42cf46ec0 -1 [] true false map[] 0xc434cd94a0 } I0125 05:16:39.262288 4678 prober.go:113] Readiness probe for "router-2-tnqzg_default(04c98b55-e2da-11e6-a4b0-0e6a5cbf0094):router" succeeded I0125 05:16:39.284154 4678 audit.go:125] 2017-01-25T05:16:39.284108942-05:00 AUDIT: id="c0d2737d-27cf-4a8e-a195-0f219a26cc5e" ip="172.18.7.222" method="GET" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0" I0125 05:16:39.284561 4678 audit.go:45] 2017-01-25T05:16:39.284547898-05:00 AUDIT: id="c0d2737d-27cf-4a8e-a195-0f219a26cc5e" response="200" I0125 05:16:39.284867 4678 panics.go:76] GET /api/v1/nodes?fieldSelector=metadata.name%3D172.18.7.222&resourceVersion=0: (951.051µs) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:39.348707 4678 audit.go:125] 2017-01-25T05:16:39.34867773-05:00 AUDIT: id="5968ad6b-f30f-433b-96b9-31746f67dc0a" ip="172.18.7.222" method="PUT" user="system:node:172.18.7.222" as="" asgroups="" namespace="" uri="/api/v1/nodes/172.18.7.222/status" I0125 05:16:39.351718 4678 audit.go:45] 2017-01-25T05:16:39.351703786-05:00 AUDIT: id="5968ad6b-f30f-433b-96b9-31746f67dc0a" response="200" I0125 05:16:39.352515 4678 
audit.go:125] 2017-01-25T05:16:39.352487382-05:00 AUDIT: id="f2d2be0b-f8f2-4218-9ed5-5ade2b4fd65d" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/cronjobs" I0125 05:16:39.352579 4678 panics.go:76] PUT /api/v1/nodes/172.18.7.222/status: (4.096496ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50864] I0125 05:16:39.353009 4678 attach_detach_controller.go:540] processVolumesInUse for node "172.18.7.222" I0125 05:16:39.353717 4678 audit.go:45] 2017-01-25T05:16:39.353706504-05:00 AUDIT: id="f2d2be0b-f8f2-4218-9ed5-5ade2b4fd65d" response="200" I0125 05:16:39.353787 4678 panics.go:76] GET /apis/batch/v2alpha1/cronjobs: (3.154116ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:16:39.354013 4678 controller.go:106] Found 0 cronjobs I0125 05:16:39.355669 4678 audit.go:125] 2017-01-25T05:16:39.355646887-05:00 AUDIT: id="2558d41e-cd04-46ef-b062-2f95bef5d21b" ip="172.18.7.222" method="GET" user="system:serviceaccount:openshift-infra:job-controller" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1/jobs" I0125 05:16:39.356395 4678 audit.go:45] 2017-01-25T05:16:39.356385636-05:00 AUDIT: id="2558d41e-cd04-46ef-b062-2f95bef5d21b" response="200" I0125 05:16:39.356452 4678 panics.go:76] GET /apis/batch/v2alpha1/jobs: (2.224658ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4 system:serviceaccount:openshift-infra:job-controller] 172.18.7.222:50846] I0125 05:16:39.356644 4678 controller.go:114] Found 0 jobs I0125 05:16:39.356652 4678 controller.go:117] Found 0 groups I0125 05:16:39.656157 4678 conversion.go:134] failed to handle multiple devices for container. Skipping Filesystem stats I0125 05:16:39.656174 4678 conversion.go:134] failed to handle multiple devices for container. 
Skipping Filesystem stats I0125 05:16:39.684631 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:39.763393 4678 eviction_manager.go:269] eviction manager: no resources are starved I0125 05:16:41.684600 4678 kubelet.go:1858] SyncLoop (housekeeping) I0125 05:16:41.981171 4678 audit.go:125] 2017-01-25T05:16:41.981135034-05:00 AUDIT: id="77e112cd-b032-49f7-95e9-5a54bb5d8861" ip="172.18.7.222" method="GET" user="system:openshift-master" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1/thirdpartyresources" I0125 05:16:41.982272 4678 audit.go:45] 2017-01-25T05:16:41.982261423-05:00 AUDIT: id="77e112cd-b032-49f7-95e9-5a54bb5d8861" response="200" I0125 05:16:41.982355 4678 panics.go:76] GET /apis/extensions/v1beta1/thirdpartyresources: (1.430577ms) 200 [[openshift/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50794] I0125 05:16:42.475973 4678 audit.go:125] 2017-01-25T05:16:42.475923294-05:00 AUDIT: id="19855f9a-e8e2-4281-a82f-74f5401f159a" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp" I0125 05:16:42.477005 4678 audit.go:45] 2017-01-25T05:16:42.476995893-05:00 AUDIT: id="19855f9a-e8e2-4281-a82f-74f5401f159a" response="404" I0125 05:16:42.477054 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp: (1.342121ms) 404 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.477736 4678 audit.go:125] 2017-01-25T05:16:42.477717767-05:00 AUDIT: id="d1ad1654-9e41-4b86-a28d-fe60b5172c44" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api" I0125 05:16:42.477847 4678 audit.go:45] 2017-01-25T05:16:42.47783929-05:00 AUDIT: id="d1ad1654-9e41-4b86-a28d-fe60b5172c44" response="200" I0125 05:16:42.477885 4678 panics.go:76] GET /api: (339.453µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.478462 4678 audit.go:125] 2017-01-25T05:16:42.478438835-05:00 AUDIT: id="a8142b11-2f75-4b2e-8741-d749f82f9143" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis" I0125 05:16:42.478574 4678 audit.go:45] 2017-01-25T05:16:42.478566244-05:00 AUDIT: id="a8142b11-2f75-4b2e-8741-d749f82f9143" response="200" I0125 05:16:42.478632 4678 panics.go:76] GET /apis: (362.684µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.479260 4678 audit.go:125] 2017-01-25T05:16:42.479240733-05:00 AUDIT: id="649d368c-4522-4425-9833-f4061e80fb8e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/apps/v1beta1" I0125 05:16:42.479369 4678 audit.go:45] 2017-01-25T05:16:42.479359894-05:00 AUDIT: id="649d368c-4522-4425-9833-f4061e80fb8e" response="200" I0125 05:16:42.479397 4678 panics.go:76] GET /apis/apps/v1beta1: (351.636µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.479984 4678 audit.go:125] 2017-01-25T05:16:42.479966003-05:00 AUDIT: id="7d56cf5f-1222-417b-b4fb-f1095a228094" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/authentication.k8s.io/v1beta1" I0125 05:16:42.480080 4678 audit.go:45] 2017-01-25T05:16:42.480073603-05:00 AUDIT: id="7d56cf5f-1222-417b-b4fb-f1095a228094" response="200" I0125 05:16:42.480103 4678 panics.go:76] GET 
/apis/authentication.k8s.io/v1beta1: (310.107µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.480698 4678 audit.go:125] 2017-01-25T05:16:42.48068037-05:00 AUDIT: id="c5ed0e0b-c20e-4393-97dd-da4984589b0c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/autoscaling/v1" I0125 05:16:42.480800 4678 audit.go:45] 2017-01-25T05:16:42.480792763-05:00 AUDIT: id="c5ed0e0b-c20e-4393-97dd-da4984589b0c" response="200" I0125 05:16:42.480823 4678 panics.go:76] GET /apis/autoscaling/v1: (315.391µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.481394 4678 audit.go:125] 2017-01-25T05:16:42.48137629-05:00 AUDIT: id="18670f1f-cdf6-418f-81aa-729890b4b6d4" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/batch/v1" I0125 05:16:42.481515 4678 audit.go:45] 2017-01-25T05:16:42.481506879-05:00 AUDIT: id="18670f1f-cdf6-418f-81aa-729890b4b6d4" response="200" I0125 05:16:42.481538 4678 panics.go:76] GET /apis/batch/v1: (336.711µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.482145 4678 audit.go:125] 2017-01-25T05:16:42.482127075-05:00 AUDIT: id="539c97cb-9d91-4820-8917-269dc9b1fc6e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/batch/v2alpha1" I0125 05:16:42.482276 4678 audit.go:45] 2017-01-25T05:16:42.482267667-05:00 AUDIT: id="539c97cb-9d91-4820-8917-269dc9b1fc6e" response="200" I0125 05:16:42.482301 4678 panics.go:76] GET /apis/batch/v2alpha1: (360.973µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.482892 4678 audit.go:125] 2017-01-25T05:16:42.482874142-05:00 AUDIT: id="c4190081-c67d-4e7b-b021-359b6a64a89c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/certificates.k8s.io/v1alpha1" I0125 05:16:42.482987 4678 audit.go:45] 2017-01-25T05:16:42.482978719-05:00 AUDIT: id="c4190081-c67d-4e7b-b021-359b6a64a89c" response="200" I0125 05:16:42.483011 4678 panics.go:76] GET /apis/certificates.k8s.io/v1alpha1: (309.433µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.483593 4678 audit.go:125] 2017-01-25T05:16:42.483573862-05:00 AUDIT: id="cba77812-7506-4de8-9d0a-01c401faed24" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/extensions/v1beta1" I0125 05:16:42.483738 4678 audit.go:45] 2017-01-25T05:16:42.483730095-05:00 AUDIT: id="cba77812-7506-4de8-9d0a-01c401faed24" response="200" I0125 05:16:42.483799 4678 panics.go:76] GET /apis/extensions/v1beta1: (404.912µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.484428 4678 audit.go:125] 2017-01-25T05:16:42.484408995-05:00 AUDIT: id="6432a8f2-1b84-4359-a81a-808e0b8f3f89" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/policy/v1beta1" I0125 05:16:42.484535 4678 audit.go:45] 2017-01-25T05:16:42.484521275-05:00 AUDIT: id="6432a8f2-1b84-4359-a81a-808e0b8f3f89" response="200" I0125 05:16:42.484561 4678 panics.go:76] GET /apis/policy/v1beta1: (335.745µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.485145 4678 audit.go:125] 2017-01-25T05:16:42.485126104-05:00 AUDIT: id="aa0d41c6-84ba-4d9f-967b-a88427002219" 
ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/apis/storage.k8s.io/v1beta1" I0125 05:16:42.485274 4678 audit.go:45] 2017-01-25T05:16:42.485264767-05:00 AUDIT: id="aa0d41c6-84ba-4d9f-967b-a88427002219" response="200" I0125 05:16:42.485299 4678 panics.go:76] GET /apis/storage.k8s.io/v1beta1: (341.764µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.485831 4678 audit.go:125] 2017-01-25T05:16:42.485813372-05:00 AUDIT: id="05814c3b-b2a7-4028-a4c6-71d85cb3836d" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="" uri="/api/v1" I0125 05:16:42.485986 4678 audit.go:45] 2017-01-25T05:16:42.485978442-05:00 AUDIT: id="05814c3b-b2a7-4028-a4c6-71d85cb3836d" response="200" I0125 05:16:42.486047 4678 panics.go:76] GET /api/v1: (394.214µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.486788 4678 audit.go:125] 2017-01-25T05:16:42.486764052-05:00 AUDIT: id="5bed7991-c3c4-4f08-8482-5857e3dae91c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/statefulsets" I0125 05:16:42.487623 4678 audit.go:45] 2017-01-25T05:16:42.487613924-05:00 AUDIT: id="5bed7991-c3c4-4f08-8482-5857e3dae91c" response="200" I0125 05:16:42.487689 4678 panics.go:76] GET /apis/apps/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/statefulsets: (1.105971ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.488437 4678 audit.go:125] 2017-01-25T05:16:42.488414157-05:00 AUDIT: id="7ad93441-6898-4a5a-b912-875f7c0bc9e3" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers" I0125 05:16:42.489157 4678 audit.go:45] 2017-01-25T05:16:42.489147173-05:00 AUDIT: id="7ad93441-6898-4a5a-b912-875f7c0bc9e3" response="200" I0125 05:16:42.489231 4678 panics.go:76] GET /apis/autoscaling/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers: (983.677µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.489931 4678 audit.go:125] 2017-01-25T05:16:42.489905926-05:00 AUDIT: id="b0216e83-08c9-4c1b-bfb7-7751f5193fcf" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/batch/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs" I0125 05:16:42.490684 4678 audit.go:45] 2017-01-25T05:16:42.490674204-05:00 AUDIT: id="b0216e83-08c9-4c1b-bfb7-7751f5193fcf" response="200" I0125 05:16:42.490738 4678 panics.go:76] GET /apis/batch/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs: (1.010114ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.491456 4678 audit.go:125] 2017-01-25T05:16:42.491433965-05:00 AUDIT: id="9e132d90-2e0f-4466-879f-bdf33cfde9e3" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/cronjobs" I0125 05:16:42.492150 
4678 audit.go:45] 2017-01-25T05:16:42.492139931-05:00 AUDIT: id="9e132d90-2e0f-4466-879f-bdf33cfde9e3" response="200" I0125 05:16:42.492214 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/cronjobs: (944.964µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.492843 4678 audit.go:125] 2017-01-25T05:16:42.492819245-05:00 AUDIT: id="7a26ad10-34d8-43c7-93f5-1a606d92d3e4" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/scheduledjobs" I0125 05:16:42.493518 4678 audit.go:45] 2017-01-25T05:16:42.493508324-05:00 AUDIT: id="7a26ad10-34d8-43c7-93f5-1a606d92d3e4" response="200" I0125 05:16:42.493566 4678 panics.go:76] GET /apis/batch/v2alpha1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/scheduledjobs: (911.854µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.494292 4678 audit.go:125] 2017-01-25T05:16:42.494252401-05:00 AUDIT: id="a1ed89dd-e72e-44ee-9632-b89a29959340" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/daemonsets" I0125 05:16:42.495038 4678 audit.go:45] 2017-01-25T05:16:42.495028507-05:00 AUDIT: id="a1ed89dd-e72e-44ee-9632-b89a29959340" response="200" I0125 05:16:42.495087 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/daemonsets: (1.002775ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.495772 4678 audit.go:125] 2017-01-25T05:16:42.495749385-05:00 AUDIT: id="a9dce878-3acb-493b-ac8d-02ebbdeb5da4" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/deployments" I0125 05:16:42.496499 4678 audit.go:45] 2017-01-25T05:16:42.496489371-05:00 AUDIT: id="a9dce878-3acb-493b-ac8d-02ebbdeb5da4" response="200" I0125 05:16:42.496548 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/deployments: (979.729µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.497234 4678 audit.go:125] 2017-01-25T05:16:42.497211288-05:00 AUDIT: id="204b9e14-8741-442a-bc29-4740936a399c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers" I0125 05:16:42.498055 4678 audit.go:45] 2017-01-25T05:16:42.498045059-05:00 AUDIT: id="204b9e14-8741-442a-bc29-4740936a399c" response="200" I0125 05:16:42.498126 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/horizontalpodautoscalers: (1.106604ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.498790 4678 audit.go:125] 2017-01-25T05:16:42.498767316-05:00 AUDIT: id="1b522f47-ef94-40ac-976b-079bbbcab37e" ip="172.18.7.222" method="GET" user="system:admin" 
as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/ingresses" I0125 05:16:42.499491 4678 audit.go:45] 2017-01-25T05:16:42.499481847-05:00 AUDIT: id="1b522f47-ef94-40ac-976b-079bbbcab37e" response="200" I0125 05:16:42.499551 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/ingresses: (954.019µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.500238 4678 audit.go:125] 2017-01-25T05:16:42.500206314-05:00 AUDIT: id="bd2e6475-80dd-4ec2-ab8f-e587847e18b8" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs" I0125 05:16:42.500920 4678 audit.go:45] 2017-01-25T05:16:42.500910829-05:00 AUDIT: id="bd2e6475-80dd-4ec2-ab8f-e587847e18b8" response="200" I0125 05:16:42.500978 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/jobs: (951.569µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.501641 4678 audit.go:125] 2017-01-25T05:16:42.501618931-05:00 AUDIT: id="c6afa844-53fc-4145-8837-b7f5d2403f82" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/networkpolicies" I0125 05:16:42.502335 4678 audit.go:45] 2017-01-25T05:16:42.50232561-05:00 AUDIT: id="c6afa844-53fc-4145-8837-b7f5d2403f82" response="200" I0125 05:16:42.502397 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/networkpolicies: (934.374µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.503068 4678 audit.go:125] 2017-01-25T05:16:42.503045636-05:00 AUDIT: id="181f9fd8-523d-4e92-a28d-f4ba86ae2a3e" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicasets" I0125 05:16:42.503852 4678 audit.go:45] 2017-01-25T05:16:42.503842992-05:00 AUDIT: id="181f9fd8-523d-4e92-a28d-f4ba86ae2a3e" response="200" I0125 05:16:42.503906 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicasets: (1.039473ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.504592 4678 audit.go:125] 2017-01-25T05:16:42.504567103-05:00 AUDIT: id="7f279a2d-8e6e-4b66-8388-f9239cc97f3a" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicationcontrollers" I0125 05:16:42.504705 4678 audit.go:45] 2017-01-25T05:16:42.504698587-05:00 AUDIT: id="7f279a2d-8e6e-4b66-8388-f9239cc97f3a" response="404" I0125 05:16:42.504737 4678 panics.go:76] GET /apis/extensions/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicationcontrollers: (342.382µs) 404 [[extended.test/v1.5.2+43a9be4 (linux/amd64) 
kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.505498 4678 audit.go:125] 2017-01-25T05:16:42.505474842-05:00 AUDIT: id="ff828080-fd8a-4ec2-8ba0-557f3caa6f5c" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/poddisruptionbudgets" I0125 05:16:42.506237 4678 audit.go:45] 2017-01-25T05:16:42.506226888-05:00 AUDIT: id="ff828080-fd8a-4ec2-8ba0-557f3caa6f5c" response="200" I0125 05:16:42.506288 4678 panics.go:76] GET /apis/policy/v1beta1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/poddisruptionbudgets: (1.001875ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.507107 4678 audit.go:125] 2017-01-25T05:16:42.507084804-05:00 AUDIT: id="461ea04e-caaf-43d3-b308-eae21f388e9a" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/configmaps" I0125 05:16:42.507917 4678 audit.go:45] 2017-01-25T05:16:42.50790758-05:00 AUDIT: id="461ea04e-caaf-43d3-b308-eae21f388e9a" response="200" I0125 05:16:42.507974 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/configmaps: (1.064585ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.508682 4678 audit.go:125] 2017-01-25T05:16:42.508660038-05:00 AUDIT: id="11aef6a6-3545-4f8f-ab3a-d1a6d612e4e9" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/endpoints" I0125 05:16:42.509477 4678 audit.go:45] 2017-01-25T05:16:42.509467463-05:00 AUDIT: id="11aef6a6-3545-4f8f-ab3a-d1a6d612e4e9" response="200" I0125 05:16:42.509533 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/endpoints: (1.063686ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.510205 4678 audit.go:125] 2017-01-25T05:16:42.51017224-05:00 AUDIT: id="03d7cdd0-3544-43f7-86a4-64ed3ebcefce" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/events" I0125 05:16:42.511006 4678 audit.go:45] 2017-01-25T05:16:42.510995931-05:00 AUDIT: id="03d7cdd0-3544-43f7-86a4-64ed3ebcefce" response="200" I0125 05:16:42.511056 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/events: (1.052639ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.511752 4678 audit.go:125] 2017-01-25T05:16:42.511724914-05:00 AUDIT: id="f864206e-5b8a-4f2a-9566-3c98a277ac84" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/limitranges" I0125 05:16:42.512486 4678 audit.go:45] 2017-01-25T05:16:42.512476502-05:00 AUDIT: id="f864206e-5b8a-4f2a-9566-3c98a277ac84" response="200" I0125 05:16:42.512537 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/limitranges: 
(981.016µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.513220 4678 audit.go:125] 2017-01-25T05:16:42.513187015-05:00 AUDIT: id="4bc1a71d-9552-4d22-a9ef-840de4861284" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/persistentvolumeclaims" I0125 05:16:42.513944 4678 audit.go:45] 2017-01-25T05:16:42.513934396-05:00 AUDIT: id="4bc1a71d-9552-4d22-a9ef-840de4861284" response="200" I0125 05:16:42.513997 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/persistentvolumeclaims: (988.813µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.514696 4678 audit.go:125] 2017-01-25T05:16:42.514669469-05:00 AUDIT: id="7ae52c53-ec5a-4cbc-ba19-098b62910a27" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/pods" I0125 05:16:42.515487 4678 audit.go:45] 2017-01-25T05:16:42.51547574-05:00 AUDIT: id="7ae52c53-ec5a-4cbc-ba19-098b62910a27" response="200" I0125 05:16:42.515550 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/pods: (1.05322ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.516394 4678 audit.go:125] 2017-01-25T05:16:42.516362654-05:00 AUDIT: id="2dd92857-672f-4570-b1c7-dd58e7bf6b65" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/podtemplates" I0125 05:16:42.517169 4678 audit.go:45] 2017-01-25T05:16:42.517156498-05:00 AUDIT: id="2dd92857-672f-4570-b1c7-dd58e7bf6b65" response="200" I0125 05:16:42.517254 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/podtemplates: (1.148557ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.517903 4678 audit.go:125] 2017-01-25T05:16:42.517880385-05:00 AUDIT: id="da63091f-5814-4ad3-a929-26fd4a46852a" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicationcontrollers" I0125 05:16:42.518608 4678 audit.go:45] 2017-01-25T05:16:42.518598782-05:00 AUDIT: id="da63091f-5814-4ad3-a929-26fd4a46852a" response="200" I0125 05:16:42.518670 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/replicationcontrollers: (955.842µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.519354 4678 audit.go:125] 2017-01-25T05:16:42.519327942-05:00 AUDIT: id="6bd2c671-1e62-4ed0-921f-99c8874a525d" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/resourcequotas" I0125 05:16:42.520108 4678 audit.go:45] 2017-01-25T05:16:42.520098009-05:00 AUDIT: id="6bd2c671-1e62-4ed0-921f-99c8874a525d" response="200" I0125 05:16:42.520169 4678 panics.go:76] GET 
/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/resourcequotas: (1.027828ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.520863 4678 audit.go:125] 2017-01-25T05:16:42.520841125-05:00 AUDIT: id="9b7e4049-8507-49a5-8c47-65f79a934757" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets" I0125 05:16:42.521626 4678 audit.go:45] 2017-01-25T05:16:42.521613458-05:00 AUDIT: id="9b7e4049-8507-49a5-8c47-65f79a934757" response="200" I0125 05:16:42.521677 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/secrets: (1.016818ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.522367 4678 audit.go:125] 2017-01-25T05:16:42.522345413-05:00 AUDIT: id="b0507ea6-2251-4e8b-ac33-0163996b8bb6" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts" I0125 05:16:42.523100 4678 audit.go:45] 2017-01-25T05:16:42.52309014-05:00 AUDIT: id="b0507ea6-2251-4e8b-ac33-0163996b8bb6" response="200" I0125 05:16:42.523149 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/serviceaccounts: (980.28µs) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.523829 4678 audit.go:125] 2017-01-25T05:16:42.523805662-05:00 AUDIT: id="aaf488ca-c229-4432-8779-682d44c33381" ip="172.18.7.222" method="GET" user="system:admin" as="" asgroups="" namespace="extended-test-postgresql-replication-2-7n81h-cp7jp" uri="/api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/services" I0125 05:16:42.524591 4678 audit.go:45] 2017-01-25T05:16:42.524581914-05:00 AUDIT: id="aaf488ca-c229-4432-8779-682d44c33381" response="200" I0125 05:16:42.524644 4678 panics.go:76] GET /api/v1/namespaces/extended-test-postgresql-replication-2-7n81h-cp7jp/services: (1.021279ms) 200 [[extended.test/v1.5.2+43a9be4 (linux/amd64) kubernetes/43a9be4] 172.18.7.222:50940] I0125 05:16:42.529860 4678 panics.go:76] GET /oauth/authorize?response_type=token&client_id=openshift-challenging-client: (812.886µs) 401 [[Go-http-client/2.0] 172.18.7.222:50940]