kubelet container status calculation doesn't handle suddenly missing data properly

David Eads
2020-10-14 10:47:04 -04:00
parent 6d01c5a589
commit ff7d1444f0
2 changed files with 60 additions and 6 deletions

@@ -1607,6 +1607,38 @@ func TestGenerateAPIPodStatusWithReasonCache(t *testing.T) {
}},
},
},
// For Unknown Container Status:
// * In certain situations the kubelet can fail to retrieve the status of a running container, which
//   results in a transition to the Unknown state. Prior to this fix, the container would make an
//   invalid transition from Running->Waiting. This test validates the correct behavior of
//   transitioning from Running->Terminated.
{
containers: []v1.Container{{Name: "unknown"}},
statuses: []*kubecontainer.Status{
{
Name: "unknown",
State: kubecontainer.ContainerStateUnknown,
},
{
Name: "unknown",
State: kubecontainer.ContainerStateRunning,
},
},
reasons: map[string]error{},
oldStatuses: []v1.ContainerStatus{{
Name: "unknown",
State: v1.ContainerState{Running: &v1.ContainerStateRunning{}},
}},
expectedState: map[string]v1.ContainerState{
"unknown": {Terminated: &v1.ContainerStateTerminated{
ExitCode: 137,
Message: "The container could not be located when the pod was terminated",
Reason: "ContainerStatusUnknown",
}},
},
expectedLastTerminationState: map[string]v1.ContainerState{
"unknown": {Running: &v1.ContainerStateRunning{}},
},
},
}
for i, test := range tests {
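
The diff above adds only the test case; for orientation, here is a minimal sketch of the conversion rule it exercises. The types and the `convertStatus` helper below are simplified stand-ins, not the kubelet's actual code; only the exit code 137, the `ContainerStatusUnknown` reason, and the message string come from the expectations in the test.

```go
// Sketch (assumed, simplified) of the behavior under test: a runtime status
// of Unknown for a container that was previously Running is reported as
// Terminated rather than Waiting, and the prior Running state is preserved
// as the last termination state.
package main

import "fmt"

// Trimmed stand-ins for the kubelet/runtime types.
type RuntimeState int

const (
	StateRunning RuntimeState = iota
	StateUnknown
)

type RuntimeStatus struct {
	Name  string
	State RuntimeState
}

type Terminated struct {
	ExitCode int32
	Reason   string
	Message  string
}

type APIContainerState struct {
	Running    bool
	Terminated *Terminated
}

type APIContainerStatus struct {
	Name                 string
	State                APIContainerState
	LastTerminationState APIContainerState
}

// convertStatus mirrors what the test expects: Unknown maps to a synthetic
// Terminated state (exit code 137, reason ContainerStatusUnknown) instead of
// an invalid Running->Waiting transition.
func convertStatus(rs RuntimeStatus, old APIContainerStatus) APIContainerStatus {
	out := APIContainerStatus{Name: rs.Name}
	switch rs.State {
	case StateRunning:
		out.State = APIContainerState{Running: true}
	case StateUnknown:
		out.State = APIContainerState{Terminated: &Terminated{
			ExitCode: 137,
			Reason:   "ContainerStatusUnknown",
			Message:  "The container could not be located when the pod was terminated",
		}}
		// Preserve the previously observed state so the API still records
		// that the container had been running before its status was lost.
		out.LastTerminationState = old.State
	}
	return out
}

func main() {
	old := APIContainerStatus{Name: "unknown", State: APIContainerState{Running: true}}
	got := convertStatus(RuntimeStatus{Name: "unknown", State: StateUnknown}, old)
	fmt.Printf("terminated reason=%q exit=%d lastRunning=%v\n",
		got.State.Terminated.Reason, got.State.Terminated.ExitCode,
		got.LastTerminationState.Running)
}
```

Running this prints `terminated reason="ContainerStatusUnknown" exit=137 lastRunning=true`, matching the `expectedState` and `expectedLastTerminationState` entries in the test case above.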