chore: Use Logger instead of console (#10557)
Dschoordsch authored Dec 9, 2024
1 parent 6061003 · commit 31ce19b
Showing 37 changed files with 114 additions and 75 deletions.
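
The pattern is uniform across files: each console.log/warn/error call becomes the corresponding Logger method, plus an import of Logger from the server's utils. The Logger module itself is not part of this diff, so the sketch below is an assumption about its shape, not the actual Parabol implementation: a thin wrapper that delegates to console, giving every service a single place to silence, filter, or redirect log output later.

  // Hypothetical sketch of parabol-server/utils/Logger (the real module is not shown in this commit).
  // Assumes a plain delegate to console with an optional mute switch for tests/CI.
  const isSilent = process.env.SILENCE_LOGS === 'true' // assumed env flag, for illustration only

  export const Logger = {
    log: (...args: unknown[]) => {
      if (!isSilent) console.log(...args)
    },
    warn: (...args: unknown[]) => {
      if (!isSilent) console.warn(...args)
    },
    error: (...args: unknown[]) => {
      if (!isSilent) console.error(...args)
    }
  }

Because a wrapper like this keeps console's variadic signature, call sites only change the identifier, which is why the diff below is almost entirely one-line substitutions.
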
7 changes: 4 additions & 3 deletions packages/chronos/chronos.ts
@@ -17,6 +17,7 @@

 import {CronJob} from 'cron'
 import getGraphQLExecutor from 'parabol-server/utils/getGraphQLExecutor'
+import {Logger} from 'parabol-server/utils/Logger'
 import publishWebhookGQL from 'parabol-server/utils/publishWebhookGQL'

 interface PossibleJob {
@@ -124,13 +125,13 @@ const chronos = () => {
 cronTime: cronTime!,
 onTick
 })
-console.log(`🌱 Chronos Job ${name}: STARTED`)
+Logger.log(`🌱 Chronos Job ${name}: STARTED`)
 } catch {
-console.log(`🌱 Chronos Job ${name}: SKIPPED`)
+Logger.log(`🌱 Chronos Job ${name}: SKIPPED`)
 }
 })

-console.log(`\n🌾🌾🌾 Server ID: ${SERVER_ID}. Ready for Chronos 🌾🌾🌾`)
+Logger.log(`\n🌾🌾🌾 Server ID: ${SERVER_ID}. Ready for Chronos 🌾🌾🌾`)
 }

 chronos()

5 changes: 3 additions & 2 deletions packages/embedder/ai_models/OpenAIGeneration.ts
@@ -1,4 +1,5 @@
 import OpenAI from 'openai'
+import {Logger} from '../../server/utils/Logger'
 import {
 AbstractGenerationModel,
 GenerationModelParams,
@@ -37,7 +38,7 @@ export class OpenAIGeneration extends AbstractGenerationModel {
 async summarize(content: string, options: OpenAIGenerationOptions) {
 if (!this.openAIApi) {
 const eMsg = 'OpenAI is not configured'
-console.log('OpenAIGenerationSummarizer.summarize(): ', eMsg)
+Logger.log('OpenAIGenerationSummarizer.summarize(): ', eMsg)
 throw new Error(eMsg)
 }
 const {maxNewTokens: max_tokens = 512, seed, stop, temperature = 0.8, topP: top_p} = options
@@ -64,7 +65,7 @@ export class OpenAIGeneration extends AbstractGenerationModel {
 if (!maybeSummary) throw new Error('OpenAI returned empty summary')
 return maybeSummary
 } catch (e) {
-console.log('OpenAIGenerationSummarizer.summarize(): ', e)
+Logger.log('OpenAIGenerationSummarizer.summarize(): ', e)
 throw e
 }
 }

3 changes: 2 additions & 1 deletion packages/embedder/ai_models/TextGenerationInference.ts
@@ -1,3 +1,4 @@
+import {Logger} from '../../server/utils/Logger'
 import {
 AbstractGenerationModel,
 GenerationModelParams,
@@ -52,7 +53,7 @@ export class TextGenerationInference extends AbstractGenerationModel {
 throw new Error('TextGenerationInference.summarize(): malformed response')
 return json.generated_text as string
 } catch (e) {
-console.log('TextGenerationInferenceSummarizer.summarize(): timeout')
+Logger.log('TextGenerationInferenceSummarizer.summarize(): timeout')
 throw e
 }
 }

8 changes: 5 additions & 3 deletions packages/embedder/ai_models/helpers/fetchWithRetry.ts
@@ -1,3 +1,5 @@
+import {Logger} from '../../../server/utils/Logger'
+
 interface FetchWithRetryOptions extends RequestInit {
 deadline: Date // Deadline for the request to complete
 debug?: boolean // Enable debug tracing
@@ -22,7 +24,7 @@ export default async (url: RequestInfo, options: FetchWithRetryOptions): Promise
 attempt++

 if (debug) {
-console.log(`Attempt ${attempt}: Fetching ${JSON.stringify(url)}`)
+Logger.log(`Attempt ${attempt}: Fetching ${JSON.stringify(url)}`)
 }

 const response = await fetch(url, fetchOptions)
@@ -40,7 +42,7 @@ export default async (url: RequestInfo, options: FetchWithRetryOptions): Promise
 waitTime = Math.min(waitTime, deadline.getTime() - Date.now())

 if (debug) {
-console.log(
+Logger.log(
 `Waiting ${waitTime / 1000} seconds before retrying due to status ${response.status}...`
 )
 }
@@ -54,7 +56,7 @@ export default async (url: RequestInfo, options: FetchWithRetryOptions): Promise
 throw new Error('Request aborted due to deadline')
 }
 if (debug) {
-console.error(`Attempt ${attempt} failed: ${error}`)
+Logger.error(`Attempt ${attempt} failed: ${error}`)
 }
 const currentTime = Date.now()
 if (currentTime >= deadline.getTime()) {

5 changes: 3 additions & 2 deletions packages/embedder/debug.ts
@@ -1,6 +1,7 @@
 // call with yarn sucrase-node billing/debug.ts
 import '../../scripts/webpack/utils/dotenv'
 import getKysely from '../server/postgres/getKysely'
+import {Logger} from '../server/utils/Logger'
 import {WorkflowOrchestrator} from './WorkflowOrchestrator'

 const debugFailedJob = async () => {
@@ -14,11 +15,11 @@ const debugFailedJob = async () => {
 .executeTakeFirst()

 if (!failedJob) {
-console.log('No failed jobs found')
+Logger.log('No failed jobs found')
 return
 }

-console.log('Debugging job:', failedJob.id)
+Logger.log('Debugging job:', failedJob.id)
 const orch = new WorkflowOrchestrator()
 await orch.runStep(failedJob as any)
 // const man = getModelManager()

4 changes: 3 additions & 1 deletion packages/embedder/logMemoryUse.ts
@@ -1,10 +1,12 @@
+import {Logger} from '../server/utils/Logger'
+
 // Not for use in prod, but useful for dev
 export const logMemoryUse = () => {
 const MB = 2 ** 20
 setInterval(() => {
 const memoryUsage = process.memoryUsage()
 const {rss} = memoryUsage
 const usedMB = Math.floor(rss / MB)
-console.log('Memory use:', usedMB, 'MB')
+Logger.log('Memory use:', usedMB, 'MB')
 }, 10000).unref()
 }

7 changes: 4 additions & 3 deletions packages/gql-executor/gqlExecutor.ts
@@ -8,6 +8,7 @@ import '../server/monkeyPatchFetch'
 import {GQLRequest} from '../server/types/custom'
 import RedisInstance from '../server/utils/RedisInstance'
 import RedisStream from './RedisStream'
+import {Logger} from '../server/utils/Logger'

 tracer.init({
 service: `gql`,
@@ -31,14 +32,14 @@ const run = async () => {

 // on shutdown, remove consumer from the group
 process.on('SIGTERM', async (signal) => {
-console.log(`Server ID: ${SERVER_ID}. Kill signal received: ${signal}, starting graceful shutdown.`)
+Logger.log(`Server ID: ${SERVER_ID}. Kill signal received: ${signal}, starting graceful shutdown.`)
 await publisher.xgroup(
 'DELCONSUMER',
 ServerChannel.GQL_EXECUTOR_STREAM,
 ServerChannel.GQL_EXECUTOR_CONSUMER_GROUP,
 executorChannel
 )
-console.log(`Server ID: ${SERVER_ID}. Graceful shutdown complete, exiting.`)
+Logger.log(`Server ID: ${SERVER_ID}. Graceful shutdown complete, exiting.`)
 process.exit()
 })

@@ -71,7 +72,7 @@ const run = async () => {
 ServerChannel.GQL_EXECUTOR_CONSUMER_GROUP,
 executorChannel
 )
-console.log(`\n💧💧💧 Server ID: ${SERVER_ID}. Ready for GraphQL Execution 💧💧💧`)
+Logger.log(`\n💧💧💧 Server ID: ${SERVER_ID}. Ready for GraphQL Execution 💧💧💧`)

 for await (const message of incomingStream) {
 // don't await the call below so this instance can immediately call incomingStream.next()

3 changes: 2 additions & 1 deletion packages/server/billing/debug.ts
@@ -1,11 +1,12 @@
 // call with yarn sucrase-node billing/debug.ts
 import '../../../scripts/webpack/utils/dotenv'
 import {getStripeManager} from '../utils/stripe'
+import {Logger} from '../utils/Logger'

 const doDebugStuff = async () => {
 const manager = getStripeManager()
 const res = await manager.updateSubscriptionQuantity('foo', 39, 1597966749)
-console.log('res', {res})
+Logger.log('res', {res})
 }

 doDebugStuff()

2 changes: 1 addition & 1 deletion packages/server/dataloader/customLoaderMakers.ts
@@ -880,7 +880,7 @@ export const featureFlagByOwnerId = (parent: RootDataLoader) => {

 const missingFeatureNames = featureNames.filter((name) => !existingFeatureNameSet.has(name))
 if (missingFeatureNames.length > 0) {
-console.error(
+Logger.error(
 `Feature flag name(s) not found: ${missingFeatureNames.join(', ')}. Add the feature flag name with the addFeatureFlag mutation.`
 )
 }

3 changes: 2 additions & 1 deletion packages/server/debugJira.ts
@@ -1,6 +1,7 @@
 // call with yarn sucrase-node debugJira.ts
 import '../../scripts/webpack/utils/dotenv'
 import AtlassianServerManager from './utils/AtlassianServerManager'
+import {Logger} from './utils/Logger'

 const debugJira = async () => {
 // const cloudId = "foo"
@@ -12,7 +13,7 @@ const debugJira = async () => {
 if (res instanceof Error) return
 const manager = new AtlassianServerManager(res.accessToken)
 const screens = await manager.getCloudNameLookup()
-console.log(JSON.stringify(screens))
+Logger.log(JSON.stringify(screens))
 }

 debugJira()

5 changes: 3 additions & 2 deletions packages/server/email/MailManagerDebug.ts
@@ -1,10 +1,11 @@
 import fs from 'fs'
+import {Logger} from '../utils/Logger'
 import MailManager, {MailManagerOptions} from './MailManager'

 export default class MailManagerDebug extends MailManager {
 async sendEmail(options: MailManagerOptions) {
 const {to, subject, body} = options
-console.warn(`SENDING EMAIL
+Logger.warn(`SENDING EMAIL
 To: ${to}
 Subject: ${subject}
 Body: ${body}`)
@@ -17,7 +18,7 @@ export default class MailManagerDebug extends MailManager {
 const folder = '/tmp'
 fs.writeFileSync(`${folder}/${filename}`, html)
 // make it a link so you can click it in the terminal
-console.warn(`Wrote email to file://${folder}/${encodeURIComponent(filename)}`)
+Logger.warn(`Wrote email to file://${folder}/${encodeURIComponent(filename)}`)
 return true
 }
 }

4 changes: 3 additions & 1 deletion packages/server/fileStorage/LocalFileStoreManager.ts
@@ -2,7 +2,9 @@ import fs from 'fs'
 import makeAppURL from 'parabol-client/utils/makeAppURL'
 import path from 'path'
 import appOrigin from '../appOrigin'
+import {Logger} from '../utils/Logger'
 import FileStoreManager from './FileStoreManager'
+
 export default class LocalFileStoreManager extends FileStoreManager {
 baseUrl = makeAppURL(appOrigin, 'self-hosted')
 constructor() {
@@ -33,7 +35,7 @@ export default class LocalFileStoreManager extends FileStoreManager {
 }

 async putBuildFile() {
-console.error(
+Logger.error(
 'Cannot call `putBuildFile` when using Local File Storage. The build files are already there'
 )
 return ''

3 changes: 2 additions & 1 deletion packages/server/fileStorage/S3FileStoreManager.ts
@@ -4,6 +4,7 @@ import type {RetryErrorInfo, StandardRetryToken} from '@smithy/types'
 import {StandardRetryStrategy} from '@smithy/util-retry'
 import mime from 'mime-types'
 import path from 'path'
+import {Logger} from '../utils/Logger'
 import FileStoreManager, {FileAssetDir} from './FileStoreManager'

 class CloudflareRetry extends StandardRetryStrategy {
@@ -14,7 +15,7 @@ class CloudflareRetry extends StandardRetryStrategy {
 const status = errorInfo.error?.$response?.statusCode
 if (status && status >= 520 && status < 530) {
 const date = errorInfo.error?.$response?.headers?.date
-console.log('Retrying after Cloudflare error', {
+Logger.log('Retrying after Cloudflare error', {
 status,
 date: date && new Date(date).toISOString(),
 path: errorInfo.error?.$response?.body?.req?.path

3 changes: 2 additions & 1 deletion packages/server/graphql/composeResolvers.ts
@@ -13,6 +13,7 @@ import {defaultFieldResolver} from 'graphql'
 import {allow} from 'graphql-shield'
 import type {ShieldRule} from 'graphql-shield/dist/types'
 import hash from 'object-hash'
+import {Logger} from '../utils/Logger'
 import {ResolverFn} from './private/resolverTypes'

 type Resolver = ResolverFn<any, any, any, any>
@@ -43,7 +44,7 @@ const wrapResolve =
 return res
 }
 } catch (err) {
-console.log(err)
+Logger.log(err)
 throw err
 }
 }

3 changes: 2 additions & 1 deletion (file path not captured)
@@ -6,6 +6,7 @@ import getProjectRoot from '../../../../../scripts/webpack/utils/getProjectRoot'
 import getKysely from '../../../postgres/getKysely'
 import getPg from '../../../postgres/getPg'
 import getPgConfig from '../../../postgres/getPgConfig'
+import {Logger} from '../../../utils/Logger'
 import {MutationResolvers} from '../resolverTypes'

 const exec = util.promisify(childProcess.exec)
@@ -23,7 +24,7 @@ const dumpPgDataToOrgBackupSchema = async (orgIds: string[]) => {
 .where('removedAt', 'is', null)
 .execute()
 const userIds = orgUsers.map(({userId}) => userId)
-console.log({teamIds, userIds})
+Logger.log({teamIds, userIds})

 // try {
 // // do all inserts here

3 changes: 2 additions & 1 deletion packages/server/graphql/private/queries/orgActivities.ts
@@ -1,5 +1,6 @@
 import {sql} from 'kysely'
 import getKysely from '../../../postgres/getKysely'
+import {Logger} from '../../../utils/Logger'
 import {OrgActivityRow, QueryResolvers} from '../resolverTypes'

 const orgActivities: QueryResolvers['orgActivities'] = async (_source, {startDate, endDate}) => {
@@ -100,7 +101,7 @@ const orgActivities: QueryResolvers['orgActivities'] = async (_source, {startDat
 const rows = Object.values(combinedResults)
 return {rows}
 } catch (error) {
-console.error('Error executing Org Activity Report:', error)
+Logger.error('Error executing Org Activity Report:', error)
 return {error: {message: 'Error executing Org Activity Report'}}
 }
 }

3 changes: 2 additions & 1 deletion packages/server/hubSpot/backfillHubSpot.ts
@@ -1,6 +1,7 @@
 // call with yarn sucrase-node hubSpot/backfillHubSpot.ts
 import '../../../scripts/webpack/utils/dotenv'
 import {getUsersByEmails} from '../postgres/queries/getUsersByEmails'
+import {Logger} from '../utils/Logger'

 const contactKeys = {
 lastMetAt: 'last_met_at',
@@ -49,7 +50,7 @@ const upsertHubspotContact = async (
 )
 if (!String(res.status).startsWith('2')) {
 const responseBody = await res.json()
-console.error(`Failed to update HubSpot for ${email}: `, responseBody.message)
+Logger.error(`Failed to update HubSpot for ${email}: `, responseBody.message)
 }
 }

3 changes: 2 additions & 1 deletion packages/server/jiraImagesHandler.ts
@@ -3,6 +3,7 @@ import jiraPlaceholder from '../../static/images/illustrations/imageNotFound.png
 import sleep from '../client/utils/sleep'
 import uWSAsyncHandler from './graphql/uWSAsyncHandler'
 import getRedis, {RedisPipelineResponse} from './utils/getRedis'
+import {Logger} from './utils/Logger'

 const getImageFromCache = async (
 imgUrlHash: string,
@@ -33,7 +34,7 @@ const servePlaceholderImage = async (res: HttpResponse) => {
 const res = await fetch(jiraPlaceholder)
 jiraPlaceholderBuffer = Buffer.from(await res.arrayBuffer())
 } catch (e) {
-console.error('Jira Placeholder image could not be fetched', e)
+Logger.error('Jira Placeholder image could not be fetched', e)
 }
 }
 res.writeStatus('200').writeHeader('Content-Type', 'image/png').end(jiraPlaceholderBuffer)

5 changes: 3 additions & 2 deletions packages/server/listenHandler.ts
@@ -1,17 +1,18 @@
 import {us_listen_socket} from 'uWebSockets.js'
 import getGraphQLExecutor from './utils/getGraphQLExecutor'
+import {Logger} from './utils/Logger'
 import serverHealthChecker from './utils/serverHealthChecker'

 const listenHandler = (listenSocket: us_listen_socket) => {
 const PORT = Number(__PRODUCTION__ ? process.env.PORT : process.env.SOCKET_PORT)
 const SERVER_ID = process.env.SERVER_ID
 if (listenSocket) {
-console.log(`\n🔥🔥🔥 Server ID: ${SERVER_ID}. Ready for Sockets: Port ${PORT} 🔥🔥🔥`)
+Logger.log(`\n🔥🔥🔥 Server ID: ${SERVER_ID}. Ready for Sockets: Port ${PORT} 🔥🔥🔥`)
 getGraphQLExecutor().subscribe()
 // Cleaning on startup because shutdowns may be abrupt
 serverHealthChecker.cleanUserPresence()
 } else {
-console.log(`❌❌❌ Port ${PORT} is in use! ❌❌❌`)
+Logger.log(`❌❌❌ Port ${PORT} is in use! ❌❌❌`)
 }
 }

(Diffs for the remaining changed files are not shown here.)
