refactor: added http status code enums/library (#987)
Signed-off-by: Jeromy Cannon <[email protected]>
jeromy-cannon authored Dec 13, 2024
1 parent 79410f0 commit 06e6026
Showing 7 changed files with 31 additions and 19 deletions.
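The refactor replaces bare numeric HTTP status codes (200, 201, 202, 302, 404, 500) with the named constants from the http-status-codes package, so the intent of each comparison is readable at the call site. Below is a minimal sketch of the pattern, using only the library's documented StatusCodes enum and getReasonPhrase helper; the fetchConfig function and its url parameter are hypothetical and only illustrate the idiom:

import {getReasonPhrase, StatusCodes} from 'http-status-codes';

// Hypothetical helper: compare response.status against named constants
// (StatusCodes.OK === 200, StatusCodes.NOT_FOUND === 404) instead of magic numbers.
async function fetchConfig(url: string): Promise<string | null> {
  const response = await fetch(url);
  if (response.status === StatusCodes.NOT_FOUND) {
    return null; // a missing config is an expected, non-fatal case
  }
  if (response.status !== StatusCodes.OK) {
    // getReasonPhrase maps a code to its reason phrase, e.g. 500 -> 'Internal Server Error'
    throw new Error(`unexpected status ${response.status} (${getReasonPhrase(response.status)})`);
  }
  return response.text();
}

The same substitution is applied mechanically across k8.ts, the remote config manager, the lease and downloader classes, and one e2e test, as shown in the hunks below.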
6 changes: 6 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions package.json
@@ -61,6 +61,7 @@
"esm": "^3.2.25",
"figlet": "^1.8.0",
"got": "^14.4.5",
"http-status-codes": "^2.3.0",
"inquirer": "^12.2.0",
"ip": "^2.0.1",
"js-base64": "^3.7.7",
3 changes: 2 additions & 1 deletion src/core/config/remote/remote_config_manager.ts
@@ -32,6 +32,7 @@ import type {DeploymentStructure} from '../local_config_data.js';
import {type ContextClusterStructure} from '../../../types/config_types.js';
import {type EmptyContextConfig, type Optional, type SoloListrTask} from '../../../types/index.js';
import type * as k8s from '@kubernetes/client-node';
+ import {StatusCodes} from 'http-status-codes';

interface ListrContext {
config: {contextCluster: ContextClusterStructure};
@@ -226,7 +227,7 @@ export class RemoteConfigManager {
try {
return await this.k8.getNamespacedConfigMap(constants.SOLO_REMOTE_CONFIGMAP_NAME);
} catch (error: any) {
- if (error.meta.statusCode !== 404) {
+ if (error.meta.statusCode !== StatusCodes.NOT_FOUND) {
throw new SoloError('Failed to read remote config from cluster', error);
}

31 changes: 16 additions & 15 deletions src/core/k8.ts
@@ -27,6 +27,7 @@ import {type V1Lease, V1ObjectMeta, V1Secret, type Context, type V1Pod} from '@k
import * as stream from 'node:stream';
import type * as http from 'node:http';
import type * as WebSocket from 'ws';
+ import {getReasonPhrase, StatusCodes} from 'http-status-codes';

import {sleep} from './helpers.js';
import * as constants from './constants.js';
@@ -159,7 +160,7 @@
};

const resp = await this.kubeClient.createNamespace(payload);
- return resp.response.statusCode === 201;
+ return resp.response.statusCode === StatusCodes.CREATED;
}

/**
@@ -168,7 +169,7 @@
*/
async deleteNamespace(name: string) {
const resp = await this.kubeClient.deleteNamespace(name);
- return resp.response.statusCode === 200.0;
+ return resp.response.statusCode === StatusCodes.OK;
}

/** Get a list of namespaces */
@@ -1199,7 +1200,7 @@
async deletePvc(name: string, namespace: string) {
const resp = await this.kubeClient.deleteNamespacedPersistentVolumeClaim(name, namespace);

- return resp.response.statusCode === 200.0;
+ return resp.response.statusCode === StatusCodes.OK;
}

// --------------------------------------- Utility Methods --------------------------------------- //
@@ -1238,7 +1239,7 @@
Duration.ofMinutes(5).toMillis(),
);

- if (result.response.statusCode === 200 && result.body.items && result.body.items.length > 0) {
+ if (result.response.statusCode === StatusCodes.OK && result.body.items && result.body.items.length > 0) {
const secretObject = result.body.items[0];
return {
name: secretObject.metadata!.name as string,
@@ -1289,7 +1290,7 @@
try {
const resp = await this.kubeClient.createNamespacedSecret(namespace, v1Secret);

- return resp.response.statusCode === 201;
+ return resp.response.statusCode === StatusCodes.CREATED;
} catch (e: Error | any) {
throw new SoloError(
`failed to create secret ${name} in namespace ${namespace}: ${e.message}, ${e?.body?.message}`,
@@ -1306,7 +1307,7 @@
*/
async deleteSecret(name: string, namespace: string) {
const resp = await this.kubeClient.deleteNamespacedSecret(name, namespace);
- return resp.response.statusCode === 200.0;
+ return resp.response.statusCode === StatusCodes.OK;
}

/* ------------- ConfigMap ------------- */
@@ -1347,7 +1348,7 @@
try {
const resp = await this.kubeClient.createNamespacedConfigMap(namespace, configMap);

- return resp.response.statusCode === 201;
+ return resp.response.statusCode === StatusCodes.CREATED;
} catch (e: Error | any) {
throw new SoloError(
`failed to create configmap ${name} in namespace ${namespace}: ${e.message}, ${e?.body?.message}`,
@@ -1379,7 +1380,7 @@
try {
const resp = await this.kubeClient.replaceNamespacedConfigMap(name, namespace, configMap);

- return resp.response.statusCode === 201;
+ return resp.response.statusCode === StatusCodes.CREATED;
} catch (e: Error | any) {
throw new SoloError(
`failed to create configmap ${name} in namespace ${namespace}: ${e.message}, ${e?.body?.message}`,
@@ -1392,7 +1393,7 @@
try {
const resp = await this.kubeClient.deleteNamespacedConfigMap(name, namespace);

- return resp.response.statusCode === 201;
+ return resp.response.statusCode === StatusCodes.CREATED;
} catch (e: Error | any) {
throw new SoloError(
`failed to create configmap ${name} in namespace ${namespace}: ${e.message}, ${e?.body?.message}`,
@@ -1426,10 +1427,10 @@
async readNamespacedLease(leaseName: string, namespace: string, timesCalled = 0) {
const {response, body} = await this.coordinationApiClient.readNamespacedLease(leaseName, namespace).catch(e => e);

- if (response?.statusCode === 500 && timesCalled < 4) {
+ if (response?.statusCode === StatusCodes.INTERNAL_SERVER_ERROR && timesCalled < 4) {
// could be k8s control plane has no resources available
this.logger.debug(
- `Retrying readNamespacedLease(${leaseName}, ${namespace}) in 5 seconds because of statusCode 500`,
+ `Retrying readNamespacedLease(${leaseName}, ${namespace}) in 5 seconds because of ${getReasonPhrase(StatusCodes.INTERNAL_SERVER_ERROR)}`,
);
await sleep(Duration.ofSeconds(5));
return await this.readNamespacedLease(leaseName, namespace, timesCalled + 1);
@@ -1484,9 +1485,9 @@
* @throws SoloError - if the status code is not OK
*/
private handleKubernetesClientError(response: http.IncomingMessage, error: Error | any, errorMessage: string): void {
- const statusCode = +response?.statusCode || 500;
+ const statusCode = +response?.statusCode || StatusCodes.INTERNAL_SERVER_ERROR;

- if (statusCode <= 202) return;
+ if (statusCode <= StatusCodes.ACCEPTED) return;
errorMessage += `, statusCode: ${statusCode}`;
this.logger.error(errorMessage, error);

Expand Down Expand Up @@ -1519,7 +1520,7 @@ export class K8 {
async killPod(podName: string, namespace: string) {
try {
const result = await this.kubeClient.deleteNamespacedPod(podName, namespace, undefined, undefined, 1);
- if (result.response.statusCode !== 200) {
+ if (result.response.statusCode !== StatusCodes.OK) {
throw new SoloError(
`Failed to delete pod ${podName} in namespace ${namespace}: statusCode: ${result.response.statusCode}`,
);
@@ -1535,7 +1536,7 @@
}
} catch (e) {
const errorMessage = `Failed to delete pod ${podName} in namespace ${namespace}: ${e.message}`;
- if (e.body?.code === 404 || e.response?.body?.code === 404) {
+ if (e.body?.code === StatusCodes.NOT_FOUND || e.response?.body?.code === StatusCodes.NOT_FOUND) {
this.logger.info(`Pod not found: ${errorMessage}`, e);
return;
}
3 changes: 2 additions & 1 deletion src/core/lease/interval_lease.ts
@@ -22,6 +22,7 @@ import {LeaseAcquisitionError, LeaseRelinquishmentError} from './lease_errors.js
import {sleep} from '../helpers.js';
import {Duration} from '../time/duration.js';
import type {Lease, LeaseRenewalService} from './lease.js';
+ import {StatusCodes} from 'http-status-codes';

/**
* Concrete implementation of a Kubernetes based time-based mutually exclusive lock via the Coordination API.
@@ -301,7 +302,7 @@
);
}

- if (e.meta.statusCode !== 404) {
+ if (e.meta.statusCode !== StatusCodes.NOT_FOUND) {
throw new LeaseAcquisitionError(
'failed to read existing leases, unexpected server response of ' + `'${e.meta.statusCode}' received`,
e,
3 changes: 2 additions & 1 deletion src/core/package_downloader.ts
@@ -31,6 +31,7 @@ import * as http from 'http';
import {Templates} from './templates.js';
import * as constants from './constants.js';
import {type SoloLogger} from './logging.js';
+ import {StatusCodes} from 'http-status-codes';

export class PackageDownloader {
constructor(public readonly logger: SoloLogger) {
@@ -70,7 +71,7 @@
},
});
req.destroy();
- if ([200, 302].includes(statusCode)) {
+ if ([StatusCodes.OK, StatusCodes.MOVED_TEMPORARILY].includes(statusCode)) {
resolve(true);
}

3 changes: 2 additions & 1 deletion test/e2e/e2e_node_util.ts
@@ -35,6 +35,7 @@ import {ConfigManager} from '../../src/core/config_manager.js';
import {type K8} from '../../src/core/k8.js';
import {type NodeCommand} from '../../src/commands/node/index.js';
import {Duration} from '../../src/core/time/duration.js';
+ import {StatusCodes} from 'http-status-codes';

export function e2eNodeKeyRefreshTest(testName: string, mode: string, releaseTag = HEDERA_PLATFORM_VERSION_TAG) {
const namespace = testName;
@@ -112,7 +113,7 @@ export function e2eNodeKeyRefreshTest(testName: string, mode: string, releaseTag
const podName = await nodeRefreshTestSetup(argv, testName, k8, nodeAlias);
if (mode === 'kill') {
const resp = await k8.kubeClient.deleteNamespacedPod(podName, namespace);
- expect(resp.response.statusCode).to.equal(200);
+ expect(resp.response.statusCode).to.equal(StatusCodes.OK);
await sleep(Duration.ofSeconds(20)); // sleep to wait for pod to finish terminating
} else if (mode === 'stop') {
expect(await nodeCmd.handlers.stop(argv)).to.be.true;
