Hi Team,
I am installing ONAP Istanbul, and the following pod is failing to start with the
status: Init:CreateContainerConfigError. I have attached the pod describe
output and the list of pods from the ONAP namespace.
Please advise on steps to fix this problem. Many thanks.
root@aarna-node-09:~# kubectl get pods -n onap dev-aaf-sms-preload-bb4qx
NAME READY STATUS RESTARTS
AGE
dev-aaf-sms-preload-bb4qx 0/1 Init:CreateContainerConfigError 0
3h39m
root@aarna-node-09:~# kubectl logs -n onap dev-aaf-sms-preload-bb4qx
Error from server (BadRequest): container "aaf-sms-preload" in pod
"dev-aaf-sms-preload-bb4qx" is waiting to start: PodInitializing
-=-=-=-=-=-=-=-=-=-=-=-
Links: You receive all messages sent to this group.
View/Reply Online (#23659): https://lists.onap.org/g/onap-discuss/message/23659
Mute This Topic: https://lists.onap.org/mt/87304565/21656
Mute #aaf:https://lists.onap.org/g/onap-discuss/mutehashtag/aaf
Group Owner: [email protected]
Unsubscribe: https://lists.onap.org/g/onap-discuss/unsub
[[email protected]]
-=-=-=-=-=-=-=-=-=-=-=-
root@aarna-node-09:~# kubectl get pods -n onap
NAME READY STATUS
RESTARTS AGE
dev-a1policymanagement-0 2/2 Running
0 3h39m
dev-aaf-cass-54cd4f87f4-9qz46 1/1 Running
0 3h35m
dev-aaf-cm-59469556b5-dxm9x 1/1 Running
0 3h35m
dev-aaf-fs-68c469648f-vrdxz 1/1 Running
0 3h35m
dev-aaf-gui-76ffd496dc-v92jz 1/1 Running
0 3h35m
dev-aaf-locate-79c9b8fd7f-ckknv 1/1 Running
0 3h35m
dev-aaf-oauth-c9494ff5c-zzvmd 1/1 Running
0 3h35m
dev-aaf-service-85758fc566-bzxqk 1/1 Running
0 3h35m
dev-aaf-sms-7775cfb986-psw9c 1/1 Running
0 3h35m
dev-aaf-sms-preload-bb4qx 0/1
Init:CreateContainerConfigError 0 3h35m
dev-aaf-sms-quorumclient-0 1/1 Running
0 3h35m
dev-aaf-sms-quorumclient-1 1/1 Running
0 3h35m
dev-aaf-sms-quorumclient-2 1/1 Running
0 3h35m
dev-aaf-sms-vault-0 2/2 Running
0 3h35m
dev-aai-6f58cf75bb-42mrd 1/1 Running
0 3h31m
dev-aai-babel-9797cbcc8-qgwsg 2/2 Running
0 3h31m
dev-aai-graphadmin-67c4f6ddf5-gp8k4 2/2 Running
0 3h31m
dev-aai-graphadmin-create-db-schema-7sp6h 0/1 Completed
0 3h31m
dev-aai-modelloader-567c6d79f8-2j54r 2/2 Running
0 3h31m
dev-aai-resources-7d7b5c879-2jps7 2/2 Running
0 3h31m
dev-aai-schema-service-7d775bc47b-g8kmn 2/2 Running
0 3h31m
dev-aai-sparky-be-89765cd79-rxbgz 2/2 Running
0 3h31m
dev-aai-traversal-7c5d4c4779-2tjc8 2/2 Running
0 3h31m
dev-awx-0 4/4 Running
0 3h12m
dev-awx-7p9nv 0/1 Completed
0 3h12m
dev-awx-postgres-6bdcdb6bcf-dx8cv 1/1 Running
0 3h12m
dev-cassandra-0 1/1 Running
0 3h28m
dev-cassandra-1 1/1 Running
0 3h26m
dev-cassandra-2 1/1 Running
0 3h24m
dev-cds-blueprints-processor-58c8747d9f-bj84k 1/1 Running
0 3h24m
dev-cds-command-executor-8655b7d86-pswkg 1/1 Running
0 3h24m
dev-cds-db-0 2/2 Running
0 3h24m
dev-cds-py-executor-84b8fd8b78-rdqxm 1/1 Running
0 3h24m
dev-cds-sdc-listener-56ff6796d-s89rl 1/1 Running
0 3h24m
dev-cds-ui-8d99c6664-zgcpm 1/1 Running
0 3h24m
dev-cli-54f746b89c-6z5zx 1/1 Running
0 3h18m
dev-consul-57ff8dbd97-jlssf 1/1 Running
0 3h15m
dev-consul-server-0 1/1 Running
0 3h15m
dev-consul-server-1 1/1 Running
0 3h15m
dev-consul-server-2 1/1 Running
0 3h15m
dev-dbc-pg-primary-67cffdcb-2mlrx 1/1 Running
0 172m
dev-dbc-pg-replica-86977dbddd-26b68 1/1 Running
0 172m
dev-dcae-bootstrap-79898d6d55-5282v 1/1 Running
0 3h4m
dev-dcae-cloudify-manager-5dd4b647ff-xqbhj 1/1 Running
0 3h4m
dev-dcae-config-binding-service-7f975498c6-bnrhq 4/4 Running
0 3h4m
dev-dcae-dashboard-7b4bdf4d4c-bm5w9 1/2 Running
0 3h4m
dev-dcae-dashboard-pg-primary-89b4569fc-fr9jk 1/1 Running
0 3h4m
dev-dcae-dashboard-pg-replica-5d78569757-kqkvf 1/1 Running
0 3h4m
dev-dcae-db-primary-5bc6c4b896-6wczg 1/1 Running
0 3h4m
dev-dcae-db-replica-5846f4fc64-cpxrc 1/1 Running
0 3h4m
dev-dcae-deployment-handler-5c967554bc-ppjng 2/2 Running
0 3h4m
dev-dcae-healthcheck-674c94d89f-gh5pg 1/1 Running
0 3h4m
dev-dcae-hv-ves-collector-6fff4db548-bp7fs 2/2 Running
0 3h1m
dev-dcae-inv-pg-primary-696458fcd-h6llb 1/1 Running
0 3h4m
dev-dcae-inv-pg-replica-8449bb6d58-x9xr7 1/1 Running
0 3h4m
dev-dcae-inventory-api-5c58887d9b-dhrk8 2/2 Running
0 3h4m
dev-dcae-mongo-0 1/1 Running
0 3h4m
dev-dcae-ms-healthcheck-86f5fd5ff4-gpr45 1/1 Running
0 3h1m
dev-dcae-policy-handler-77ff494bfc-5v8wm 2/2 Running
0 3h4m
dev-dcae-prh-79c9d6d7bb-stfh4 2/2 Running
0 3h1m
dev-dcae-servicechange-handler-8c54d7d8d-7jnhm 1/1 Running
0 3h4m
dev-dcae-tcagen2-c6d9dbb98-wnn7z 2/2 Running
0 3h1m
dev-dcae-ves-collector-8597cd857b-54rfs 2/2 Running
0 3h1m
dev-dcae-ves-openapi-manager-5d856c74cb-587gq 1/1 Running
0 3h4m
dev-dcaemod-db-primary-5b44f59fcb-8lcjz 1/1 Running
0 177m
dev-dcaemod-db-replica-654f49bffb-d7jf5 1/1 Running
0 177m
dev-dcaemod-designtool-59dc5d9867-sv7nm 1/1 Running
0 177m
dev-dcaemod-distributor-api-5d664bd65f-vmhvl 1/1 Running
0 177m
dev-dcaemod-genprocessor-6fdc9bf7fc-xszmc 2/2 Running
0 177m
dev-dcaemod-healthcheck-fcb48b7b4-jt87f 1/1 Running
0 177m
dev-dcaemod-nifi-registry-6bf9f5fcbd-wl669 1/1 Running
0 177m
dev-dcaemod-onboarding-api-855fdd5bbd-bqjn4 1/1 Running
0 177m
dev-dcaemod-runtime-api-796b9b44f6-km957 1/1 Running
0 177m
dev-dmaap-bc-5877cb5bbf-9fnn5 1/1 Running
0 172m
dev-dmaap-bc-dmaap-provisioning-t8kww 0/1 Completed
0 117m
dev-dmaap-dr-mariadb-init-config-job-hmmm6 0/1 Completed
0 172m
dev-dmaap-dr-node-0 2/2 Running
0 172m
dev-dmaap-dr-prov-86c9455c5-kr4g4 2/2 Running
0 172m
dev-ejbca-5bdf4556bd-lcljp 1/1 Running
0 3h12m
dev-ejbca-config-config-job-rpzg4 0/1 Completed
0 172m
dev-holmes-engine-mgmt-6c6ff79876-pq55n 1/1 Running
0 169m
dev-holmes-pg-primary-786ccc8bb7-w5hnw 1/1 Running
0 169m
dev-holmes-pg-replica-7dbf6957d7-jsklp 1/1 Running
0 169m
dev-holmes-rule-mgmt-f44f9b8fc-zw545 1/1 Running
0 169m
dev-kube2msb-6d85b7489b-b25fb 1/1 Running
0 160m
dev-mariadb-galera-0 2/2 Running
0 166m
dev-mariadb-galera-1 2/2 Running
0 166m
dev-mariadb-galera-2 2/2 Running
0 165m
dev-message-router-0 1/1 Running
0 172m
dev-message-router-kafka-0 1/1 Running
0 172m
dev-message-router-kafka-1 1/1 Running
0 172m
dev-message-router-kafka-2 1/1 Running
0 172m
dev-message-router-zookeeper-0 1/1 Running
0 172m
dev-message-router-zookeeper-1 1/1 Running
0 172m
dev-message-router-zookeeper-2 1/1 Running
0 172m
dev-modeling-etsicatalog-7c8fb8db94-fj64f 2/2 Running
0 163m
dev-modeling-etsicatalog-config-job-jd8mh 0/1 Completed
0 163m
dev-msb-consul-55bf59f97f-f4z7m 1/1 Running
0 160m
dev-msb-discovery-d88557b4c-w8xkq 2/2 Running
0 160m
dev-msb-eag-5fd5f9889-ktxjh 2/2 Running
0 160m
dev-msb-iag-5f67445cf8-qqkqk 2/2 Running
0 160m
dev-multicloud-84d7df49dd-9cb6g 2/2 Running
0 157m
dev-multicloud-fcaps-6898c968f5-989ks 3/3 Running
0 157m
dev-multicloud-k8s-75cd8cdb47-gkfvd 2/2 Running
0 157m
dev-multicloud-k8s-etcd-0 1/1 Running
0 157m
dev-multicloud-k8s-mongo-0 1/1 Running
0 157m
dev-multicloud-pike-6fc65bb698-km5lf 2/2 Running
0 157m
dev-nbi-78d7dc959b-v6txg 1/1 Running
0 154m
dev-nbi-config-config-job-4qgct 0/1 Completed
0 154m
dev-nbi-mongo-0 1/1 Running
0 154m
dev-nengdb-init-config-job-xhdjd 0/1 Completed
0 8m9s
dev-netbox-app-6f5b4f6575-j6kpn 1/1 Running
1 3h12m
dev-netbox-app-provisioning-vck2z 0/1 Completed
0 3h12m
dev-netbox-nginx-66d8c867d8-fg7qw 1/1 Running
0 3h12m
dev-netbox-postgres-b9576cb79-5pmww 1/1 Running
0 3h12m
dev-network-name-gen-946c669c4-cttqh 1/1 Running
0 8m9s
dev-oof-79bf8c6ffc-fvd4q 0/1 Init:1/4
3 150m
dev-oof-has-api-5c64c7b595-7drzb 0/2 Init:0/4
14 150m
dev-oof-has-controller-869456c6c4-5b674 0/1 Init:1/2
0 150m
dev-oof-has-data-6d858fd565-wkqrc 0/1 Init:1/2
0 150m
dev-oof-has-etcd-0 1/1 Running
0 150m
dev-oof-has-etcd-1 1/1 Running
0 150m
dev-oof-has-etcd-2 1/1 Running
0 150m
dev-oof-has-etcd-config-job-9thtl 0/1 Completed
0 150m
dev-oof-has-reservation-6d5c9845c5-tkxtp 0/1 Init:1/2
0 150m
dev-oof-has-solver-b74f44d6d-ntfqk 0/1 Init:1/2
0 150m
dev-policy-apex-pdp-0 1/1 Running
0 133m
dev-policy-api-7fb87968f4-8t22v 1/1 Running
0 133m
dev-policy-clamp-be-7d6478cd65-84w2z 1/1 Running
0 133m
dev-policy-clamp-cl-http-ppnt-6b4954cb9c-td594 1/1 Running
0 133m
dev-policy-clamp-cl-k8s-ppnt-78db5d58f-tdjnb 1/1 Running
0 133m
dev-policy-clamp-cl-pf-ppnt-dfc9886b8-xjbvr 1/1 Running
0 133m
dev-policy-clamp-cl-runtime-df458c849-zcfcv 1/1 Running
0 133m
dev-policy-clamp-fe-774ff57c96-g5496 1/1 Running
0 133m
dev-policy-clamp-galera-config-knqbt 0/1 Completed
0 123m
dev-policy-distribution-747f6bbbf5-cz988 1/1 Running
0 133m
dev-policy-drools-pdp-0 1/1 Running
0 133m
dev-policy-galera-config-9w42v 0/1 Completed
0 123m
dev-policy-gui-9f99bd8cf-g4xxc 1/1 Running
0 133m
dev-policy-mariadb-0 2/2 Running
0 133m
dev-policy-pap-8bdf6ff47-bnqnx 1/1 Running
0 133m
dev-policy-xacml-pdp-8fb6df87d-l6qw9 1/1 Running
0 133m
dev-portal-app-797b4b7565-mwfxs 2/2 Running
0 130m
dev-portal-cassandra-5f7fcf6bcd-2rshr 1/1 Running
0 130m
dev-portal-db-7484f9fbd8-xd6vl 1/1 Running
0 130m
dev-portal-db-config-q5cm8 0/2 Completed
0 130m
dev-portal-sdk-74d4fdb46f-9jdx8 2/2 Running
0 130m
dev-portal-widget-6df97b4895-4wg8x 1/1 Running
0 130m
dev-postgres-primary-76d597bb96-rj294 1/1 Running
0 127m
dev-postgres-replica-85ddd744c6-2xxc8 1/1 Running
0 127m
dev-robot-695b496f8c-5cbr9 1/1 Running
0 120m
dev-sdc-be-787d85686d-rb4s6 2/2 Running
0 114m
dev-sdc-be-config-backend-2p4pf 0/1 Completed
0 114m
dev-sdc-cs-config-cassandra-9r2fl 0/1 Completed
0 114m
dev-sdc-fe-6886dbbfb5-n87s8 2/2 Running
0 114m
dev-sdc-helm-validator-7f9f8797df-2zg6f 1/1 Running
0 114m
dev-sdc-onboarding-be-5ccd476bf7-lrmmz 2/2 Running
0 114m
dev-sdc-onboarding-be-cassandra-init-87dmq 0/1 Completed
0 114m
dev-sdc-wfd-be-84b8c8d4bb-vr8pf 1/1 Running
0 114m
dev-sdc-wfd-be-workflow-init-jhdt7 0/1 Completed
0 114m
dev-sdc-wfd-fe-6586f8d889-6zq9d 2/2 Running
0 114m
dev-sdnc-0 1/2 Running
0 8m9s
dev-sdnc-ansible-server-7df98d44f9-nvlmw 0/1 Init:1/2
0 8m9s
dev-sdnc-dbinit-job-t4cm2 0/1 Completed
0 8m9s
dev-sdnc-dgbuilder-55c64f79f5-7dhx2 1/1 Running
0 8m9s
dev-sdnc-dmaap-listener-9bd45b7fd-lzd2q 0/1 Init:1/2
0 8m9s
dev-sdnc-sdnrdb-init-job-djcr7 0/1 Completed
0 8m9s
dev-sdnc-ueb-listener-84ddd79554-tn2f5 0/1 Init:1/2
0 8m9s
dev-sdnc-web-5c4478797c-f99sm 0/1 Init:2/3
0 8m9s
dev-sdnrdb-coordinating-only-6997977855-jbmn6 2/2 Running
0 8m9s
dev-sdnrdb-master-0 1/1 Running
0 8m9s
dev-sdnrdb-master-1 1/1 Running
0 8m1s
dev-sdnrdb-master-2 1/1 Running
0 7m55s
dev-so-5b6f68c857-bspl5 2/2 Running
0 85m
dev-so-admin-cockpit-77c9f56ddd-pdjfg 1/1 Running
0 85m
dev-so-bpmn-infra-55b688655-n6zjq 2/2 Running
0 85m
dev-so-catalog-db-adapter-78c7b8f66b-l2rjl 1/1 Running
0 85m
dev-so-cnf-adapter-8658ddffd7-tvz4x 1/1 Running
0 85m
dev-so-etsi-nfvo-ns-lcm-65f4c54c87-2fhxg 1/1 Running
0 85m
dev-so-etsi-sol003-adapter-77d94d5d95-hhlcz 1/1 Running
0 85m
dev-so-etsi-sol005-adapter-d48dc8fb8-94rpc 1/1 Running
0 85m
dev-so-mariadb-config-job-fpxn9 0/1 Completed
0 85m
dev-so-nssmf-adapter-54cbf8dcd8-7rs78 1/1 Running
0 85m
dev-so-oof-adapter-85fb577fb9-fck9r 2/2 Running
0 85m
dev-so-openstack-adapter-59f84fc9d-cm24d 2/2 Running
0 85m
dev-so-request-db-adapter-c466f8bcd-7s79g 1/1 Running
0 85m
dev-so-sdc-controller-5c58c47596-6hhdg 2/2 Running
0 85m
dev-so-sdnc-adapter-7f8f679f89-pk5xt 2/2 Running
0 85m
dev-so-tls-cert-set-tls-secret-djvzk 0/1 Completed
0 85m
dev-uui-5cb485994d-6wqbf 1/1 Running
0 81m
dev-uui-server-5b7fb4fdf7-627qv 1/1 Running
0 81m
dev-vfc-generic-vnfm-driver-5646879bb4-8s7mg 2/2 Running
0 78m
dev-vfc-huawei-vnfm-driver-7bf6f856c-zg2bs 2/2 Running
0 78m
dev-vfc-nslcm-6649796b-r8xrh 2/2 Running
0 78m
dev-vfc-redis-67dd44d594-555d4 1/1 Running
0 78m
dev-vfc-vnflcm-6cbd7dbb85-vq26n 2/2 Running
0 78m
dev-vfc-vnfmgr-79bc4457dd-m8bhh 2/2 Running
0 78m
dev-vfc-vnfres-66c85d66db-kv6fk 2/2 Running
0 78m
dev-vfc-zte-vnfm-driver-6767f64c55-6m52v 2/2 Running
0 78m
dev-vid-946b54ddd-6sffw 2/2 Running
0 75m
dev-vid-mariadb-init-config-job-j4zp5 0/1 Completed
0 75m
dev-vnfsdk-7967599c6f-dkf4r 1/1 Running
0 72m
dev-vnfsdk-init-postgres-cbklr 0/1 Completed
0 72m
dev-vnfsdk-postgres-primary-f86c68678-jz26v 1/1 Running
0 72m
dev-vnfsdk-postgres-replica-7466846bfd-mt2xg 1/1 Running
0 72m
root@aarna-node-09:~# kubectl describe pod -n onap dev-aaf-sms-preload-bb4qx
Name: dev-aaf-sms-preload-bb4qx
Namespace: onap
Priority: 0
Node: anod-worker-01/151.106.9.202
Start Time: Thu, 25 Nov 2021 13:02:49 +0100
Labels: app=aaf-sms
controller-uid=198b4c6c-bf97-41c1-8711-bef906b417f4
job-name=dev-aaf-sms-preload
release=dev
Annotations: cni.projectcalico.org/podIP: 10.42.2.185/32
cni.projectcalico.org/podIPs: 10.42.2.185/32
Status: Pending
IP: 10.42.2.185
IPs:
IP: 10.42.2.185
Controlled By: Job/dev-aaf-sms-preload
Init Containers:
aaf-sms-cert-init-readiness:
Container ID:
docker://2c45f34f4d439844a8a605ec795d1b7d2f2738a5d54a9b65f96435514b8b1645
Image: nexus3.onap.org:10001/onap/oom/readiness:3.0.1
Image ID:
docker-pullable://nexus3.onap.org:10001/onap/oom/readiness@sha256:317c8a361ae73750f4d4a1b682c42b73de39083f73228dede31fd68b16c089db
Port: <none>
Host Port: <none>
Command:
/app/ready.py
Args:
--container-name
aaf-locate
--container-name
aaf-cm
--container-name
aaf-service
State: Terminated
Reason: Completed
Exit Code: 0
Started: Thu, 25 Nov 2021 13:17:14 +0100
Finished: Thu, 25 Nov 2021 13:17:41 +0100
Ready: True
Restart Count: 0
Limits:
cpu: 100m
memory: 100Mi
Requests:
cpu: 3m
memory: 20Mi
Environment:
NAMESPACE: onap (v1:metadata.namespace)
Mounts:
/var/run/secrets/kubernetes.io/serviceaccount from default-token-lc4kb
(ro)
aaf-sms-aaf-config:
Container ID:
docker://479809e1e80172db0660a0ae7d3b5c85b99c348fd79b3f3f9615666e1a1a6062
Image: nexus3.onap.org:10001/onap/aaf/aaf_agent:2.1.20
Image ID:
docker-pullable://nexus3.onap.org:10001/onap/aaf/aaf_agent@sha256:fbc54287e95950956aeb8c5f86f2618fb52192aee389e2fa212c183395bb5c2b
Port: <none>
Host Port: <none>
Command:
sh
-c
/opt/app/aaf_config/bin/agent.sh
. /opt/app/aaf_config/bin/retrieval_check.sh
/opt/app/aaf_config/bin/aaf-add-config.sh
State: Terminated
Reason: Completed
Exit Code: 0
Started: Thu, 25 Nov 2021 13:17:42 +0100
Finished: Thu, 25 Nov 2021 13:17:55 +0100
Ready: True
Restart Count: 0
Environment:
APP_FQI: [email protected]
aaf_locate_url: https://aaf-locate.onap:8095
aaf_locator_container: oom
aaf_locator_container_ns: onap
aaf_locator_fqdn: aaf-sms
aaf_locator_app_ns: org.osaaf.aaf
DEPLOY_FQI: <set to the key 'login' in secret
'dev-aaf-sms-cert-init-deployer-creds'> Optional: false
DEPLOY_PASSWORD: <set to the key 'password' in secret
'dev-aaf-sms-cert-init-deployer-creds'> Optional: false
cadi_longitude: 0.0
cadi_latitude: 0.0
aaf_locator_public_fqdn: aaf-sms.onap.org
Mounts:
/opt/app/aaf_config/bin/aaf-add-config.sh from aaf-add-config
(rw,path="aaf-add-config.sh")
/opt/app/aaf_config/bin/retrieval_check.sh from aaf-add-config
(rw,path="retrieval_check.sh")
/opt/app/aaf_config/cert/truststoreONAP.p12.b64 from aaf-agent-certs
(rw,path="truststoreONAP.p12.b64")
/opt/app/aaf_config/cert/truststoreONAPall.jks.b64 from aaf-agent-certs
(rw,path="truststoreONAPall.jks.b64")
/opt/app/osaaf from dev-aaf-sms-aaf-config (rw)
/var/run/secrets/kubernetes.io/serviceaccount from default-token-lc4kb
(ro)
aaf-sms-update-config:
Container ID:
Image: docker.io/dibi/envsubst:1
Image ID:
Port: <none>
Host Port: <none>
Command:
sh
Args:
-c
export AAI_PASS=${AAI_PASS_PLAIN}; export
CONDUCTOR_PASS=${CONDUCTOR_PASS_PLAIN}; export SDNC_PASS=${SDNC_PASS_PLAIN};
export MUSIC_PASS=${MUSIC_PASS_PLAIN}; export AAF_PASS=${AAF_PASS_PLAIN};
export POLICY_PLAT_PASS=${POLICY_PLAT_PASS_PLAIN}; export
POLICY_CLI_PASS=${POLICY_CLI_PASS_PLAIN}; export
OSDF_PLACEMENT_PASS=${OSDF_PLACEMENT_PASS_PLAIN}; export
OSDF_PLACEMENT_SO_PASS=${OSDF_PLACEMENT_SO_PASS_PLAIN}; export
OSDF_PLACMENET_VFC_PASS=${OSDF_PLACEMENT_VFC_PASS_PLAIN}; export
OSDF_CM_SCHEDULER_PASS=${OSDF_CM_SCHEDULER_PASS_PLAIN}; export
CONFIG_DB_PASS=${CONFIG_DB_PASS_PLAIN}; export
OSDF_PCI_OPT_PASS=${OSDF_PCI_OPT_PASS_PLAIN}; export
OSDF_OPT_ENGINE_PASS=${OSDF_OPT_ENGINE_PASS_PLAIN}; export
SO_PASS=${SO_PASS_PLAIN}; export SDC_PASS=${SDC_PASS_PLAIN}; export
CPS_PASS=${CPS_PASS_PLAIN}; cd /config-input; for PFILE in `find . -not -type d
| grep -v -F ..`; do envsubst <${PFILE} >/config/${PFILE}; done
State: Waiting
Reason: CreateContainerConfigError
Ready: False
Restart Count: 0
Environment:
AAI_USER: <set to the key 'login' in secret
'dev-aaf-sms-aai-creds'> Optional: false
AAI_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-aai-creds'> Optional: false
CONDUCTOR_USER: <set to the key 'login' in secret
'dev-aaf-sms-conductor-creds'> Optional: false
CONDUCTOR_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-conductor-creds'> Optional: false
SDNC_USER: <set to the key 'login' in secret
'dev-aaf-sms-sdnc-creds'> Optional: false
SDNC_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-sdnc-creds'> Optional: false
MUSIC_USER: <set to the key 'login' in secret
'dev-aaf-sms-music-creds'> Optional: false
MUSIC_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-music-creds'> Optional: false
AAF_USER: <set to the key 'login' in secret
'dev-aaf-sms-aaf-creds'> Optional: false
AAF_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-aaf-creds'> Optional: false
POLICY_PLAT_USER: <set to the key 'login' in secret
'dev-aaf-sms-policy-plat-creds'> Optional: false
POLICY_PLAT_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-policy-plat-creds'> Optional: false
POLICY_CLI_USER: <set to the key 'login' in secret
'dev-aaf-sms-policy-cli-creds'> Optional: false
POLICY_CLI_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-policy-cli-creds'> Optional: false
OSDF_PLACEMENT_USER: <set to the key 'login' in secret
'dev-aaf-sms-osdf-placement-creds'> Optional: false
OSDF_PLACEMENT_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-osdf-placement-creds'> Optional: false
OSDF_PLACEMENT_SO_USER: <set to the key 'login' in secret
'dev-aaf-sms-osdf-placement-so-creds'> Optional: false
OSDF_PLACEMENT_SO_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-osdf-placement-so-creds'> Optional: false
OSDF_PLACEMENT_VFC_USER: <set to the key 'login' in secret
'dev-aaf-sms-osdf-placement-vfc-creds'> Optional: false
OSDF_PLACEMENT_VFC_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-osdf-placement-vfc-creds'> Optional: false
OSDF_CM_SCHEDULER_USER: <set to the key 'login' in secret
'dev-aaf-sms-osdf-cm-scheduler-creds'> Optional: false
OSDF_CM_SCHEDULER_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-osdf-cm-scheduler-creds'> Optional: false
CONFIG_DB_USER: <set to the key 'login' in secret
'dev-aaf-sms-config-db-creds'> Optional: false
CONFIG_DB_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-config-db-creds'> Optional: false
OSDF_PCI_OPT_USER: <set to the key 'login' in secret
'dev-aaf-sms-osdf-pci-opt-creds'> Optional: false
OSDF_PCI_OPT_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-osdf-pci-opt-creds'> Optional: false
OSDF_OPT_ENGINE_USER: <set to the key 'login' in secret
'dev-aaf-sms-osdf-opt-engine-creds'> Optional: false
OSDF_OPT_ENGINE_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-osdf-opt-engine-creds'> Optional: false
SO_USER: <set to the key 'login' in secret
'dev-aaf-sms-so-creds'> Optional: false
SO_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-so-creds'> Optional: false
SDC_USER: <set to the key 'login' in secret
'dev-aaf-sms-sdc-creds'> Optional: false
SDC_PASS_PLAIN: <set to the key 'password' in secret
'dev-aaf-sms-sdc-creds'> Optional: false
CPS_USER: <set to the key 'login' in secret
'dev-cps-core-app-user-creds'> Optional: false
CPS_PASS_PLAIN: <set to the key 'password' in secret
'dev-cps-core-app-user-creds'> Optional: false
Mounts:
/config-input from aaf-sms-preload-input (rw)
/config/ from aaf-sms-preload (rw)
/var/run/secrets/kubernetes.io/serviceaccount from default-token-lc4kb
(ro)
aaf-sms-readiness:
Container ID:
Image: nexus3.onap.org:10001/onap/oom/readiness:3.0.1
Image ID:
Port: <none>
Host Port: <none>
Command:
/app/ready.py
Args:
--container-name
aaf-sms
--container-name
aaf-sms-quorumclient
State: Waiting
Reason: PodInitializing
Ready: False
Restart Count: 0
Environment:
NAMESPACE: onap (v1:metadata.namespace)
Mounts:
/var/run/secrets/kubernetes.io/serviceaccount from default-token-lc4kb
(ro)
Containers:
aaf-sms-preload:
Container ID:
Image: nexus3.onap.org:10001/onap/aaf/sms:4.0.2
Image ID:
Port: <none>
Host Port: <none>
Command:
/sms/bin/preload
-cacert
/opt/app/osaaf/local/aaf_root_ca.cer
-jsondir
/preload/config
-serviceport
10443
-serviceurl
https://aaf-sms.onap
State: Waiting
Reason: PodInitializing
Ready: False
Restart Count: 0
Environment: <none>
Mounts:
/etc/localtime from localtime (ro)
/opt/app/osaaf from dev-aaf-sms-aaf-config (rw)
/preload/config from aaf-sms-preload (rw)
/var/run/secrets/kubernetes.io/serviceaccount from default-token-lc4kb
(ro)
Conditions:
Type Status
Initialized False
Ready False
ContainersReady False
PodScheduled True
Volumes:
dev-aaf-sms-aaf-config:
Type: EmptyDir (a temporary directory that shares a pod's lifetime)
Medium: Memory
SizeLimit: <unset>
aaf-agent-certs:
Type: ConfigMap (a volume populated by a ConfigMap)
Name: dev-cert-wrapper-certs
Optional: false
aaf-add-config:
Type: ConfigMap (a volume populated by a ConfigMap)
Name: dev-aaf-sms-cert-init-add-config
Optional: false
localtime:
Type: HostPath (bare host directory volume)
Path: /etc/localtime
HostPathType:
aaf-sms-preload-input:
Type: ConfigMap (a volume populated by a ConfigMap)
Name: dev-aaf-sms-preload
Optional: false
aaf-sms-preload:
Type: EmptyDir (a temporary directory that shares a pod's lifetime)
Medium: Memory
SizeLimit: <unset>
default-token-lc4kb:
Type: Secret (a volume populated by a Secret)
SecretName: default-token-lc4kb
Optional: false
QoS Class: Burstable
Node-Selectors: <none>
Tolerations: node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Normal Pulled 4m23s (x899 over 3h19m) kubelet Container image
"docker.io/dibi/envsubst:1" already present on machine